CMS 3D CMS Logo

HGVHistoProducerAlgo.cc
Go to the documentation of this file.
1 #include <numeric>
2 #include <iomanip>
3 
7 #include "TMath.h"
8 #include "TLatex.h"
9 #include "TF1.h"
10 
11 using namespace std;
12 
//Parameters for the score cut. Later, this will become part of the
//configuration parameter for the HGCAL associator.
//NOTE(review): semantics below are inferred from the names — confirm against
//the association code that consumes them.
// Max score for a LayerCluster to be considered associated to a CaloParticle.
constexpr double ScoreCutLCtoCP_ = 0.1;
// Max score for a CaloParticle to be considered associated to a LayerCluster.
constexpr double ScoreCutCPtoLC_ = 0.1;
// Score threshold for a MultiCluster w.r.t. a CaloParticle (fake/merge definition).
constexpr double ScoreCutMCLtoCPFakeMerge_ = 0.6;
// Score threshold for a CaloParticle w.r.t. a MultiCluster (duplicate definition).
constexpr double ScoreCutCPtoMCLDup_ = 0.2;
19 
21  : //parameters for eta
22  minEta_(pset.getParameter<double>("minEta")),
23  maxEta_(pset.getParameter<double>("maxEta")),
24  nintEta_(pset.getParameter<int>("nintEta")),
25  useFabsEta_(pset.getParameter<bool>("useFabsEta")),
26 
27  //parameters for energy
28  minEne_(pset.getParameter<double>("minEne")),
29  maxEne_(pset.getParameter<double>("maxEne")),
30  nintEne_(pset.getParameter<int>("nintEne")),
31 
32  //parameters for pt
33  minPt_(pset.getParameter<double>("minPt")),
34  maxPt_(pset.getParameter<double>("maxPt")),
35  nintPt_(pset.getParameter<int>("nintPt")),
36 
37  //parameters for phi
38  minPhi_(pset.getParameter<double>("minPhi")),
39  maxPhi_(pset.getParameter<double>("maxPhi")),
40  nintPhi_(pset.getParameter<int>("nintPhi")),
41 
42  //parameters for counting mixed hits clusters
43  minMixedHitsCluster_(pset.getParameter<double>("minMixedHitsCluster")),
44  maxMixedHitsCluster_(pset.getParameter<double>("maxMixedHitsCluster")),
45  nintMixedHitsCluster_(pset.getParameter<int>("nintMixedHitsCluster")),
46 
47  //parameters for the total amount of energy clustered by all layer clusters (fraction over caloparticles)
48  minEneCl_(pset.getParameter<double>("minEneCl")),
49  maxEneCl_(pset.getParameter<double>("maxEneCl")),
50  nintEneCl_(pset.getParameter<int>("nintEneCl")),
51 
52  //parameters for the longitudinal depth barycenter.
53  minLongDepBary_(pset.getParameter<double>("minLongDepBary")),
54  maxLongDepBary_(pset.getParameter<double>("maxLongDepBary")),
55  nintLongDepBary_(pset.getParameter<int>("nintLongDepBary")),
56 
 57  //parameters for z position of vertex plots
58  minZpos_(pset.getParameter<double>("minZpos")),
59  maxZpos_(pset.getParameter<double>("maxZpos")),
60  nintZpos_(pset.getParameter<int>("nintZpos")),
61 
62  //Parameters for the total number of layer clusters per layer
63  minTotNClsperlay_(pset.getParameter<double>("minTotNClsperlay")),
64  maxTotNClsperlay_(pset.getParameter<double>("maxTotNClsperlay")),
65  nintTotNClsperlay_(pset.getParameter<int>("nintTotNClsperlay")),
66 
67  //Parameters for the energy clustered by layer clusters per layer (fraction over caloparticles)
68  minEneClperlay_(pset.getParameter<double>("minEneClperlay")),
69  maxEneClperlay_(pset.getParameter<double>("maxEneClperlay")),
70  nintEneClperlay_(pset.getParameter<int>("nintEneClperlay")),
71 
72  //Parameters for the score both for:
73  //1. calo particle to layer clusters association per layer
74  //2. layer cluster to calo particles association per layer
75  minScore_(pset.getParameter<double>("minScore")),
76  maxScore_(pset.getParameter<double>("maxScore")),
77  nintScore_(pset.getParameter<int>("nintScore")),
78 
79  //Parameters for shared energy fraction. That is:
80  //1. Fraction of each of the layer clusters energy related to a
81  //calo particle over that calo particle's energy.
82  //2. Fraction of each of the calo particles energy
83  //related to a layer cluster over that layer cluster's energy.
84  minSharedEneFrac_(pset.getParameter<double>("minSharedEneFrac")),
85  maxSharedEneFrac_(pset.getParameter<double>("maxSharedEneFrac")),
86  nintSharedEneFrac_(pset.getParameter<int>("nintSharedEneFrac")),
87 
88  //Same as above for multiclusters
89  minMCLSharedEneFrac_(pset.getParameter<double>("minMCLSharedEneFrac")),
90  maxMCLSharedEneFrac_(pset.getParameter<double>("maxMCLSharedEneFrac")),
91  nintMCLSharedEneFrac_(pset.getParameter<int>("nintMCLSharedEneFrac")),
92 
93  //Parameters for the total number of layer clusters per thickness
94  minTotNClsperthick_(pset.getParameter<double>("minTotNClsperthick")),
95  maxTotNClsperthick_(pset.getParameter<double>("maxTotNClsperthick")),
96  nintTotNClsperthick_(pset.getParameter<int>("nintTotNClsperthick")),
97 
98  //Parameters for the total number of cells per per thickness per layer
99  minTotNcellsperthickperlayer_(pset.getParameter<double>("minTotNcellsperthickperlayer")),
100  maxTotNcellsperthickperlayer_(pset.getParameter<double>("maxTotNcellsperthickperlayer")),
101  nintTotNcellsperthickperlayer_(pset.getParameter<int>("nintTotNcellsperthickperlayer")),
102 
103  //Parameters for the distance of cluster cells to seed cell per thickness per layer
104  minDisToSeedperthickperlayer_(pset.getParameter<double>("minDisToSeedperthickperlayer")),
105  maxDisToSeedperthickperlayer_(pset.getParameter<double>("maxDisToSeedperthickperlayer")),
106  nintDisToSeedperthickperlayer_(pset.getParameter<int>("nintDisToSeedperthickperlayer")),
107 
108  //Parameters for the energy weighted distance of cluster cells to seed cell per thickness per layer
109  minDisToSeedperthickperlayerenewei_(pset.getParameter<double>("minDisToSeedperthickperlayerenewei")),
110  maxDisToSeedperthickperlayerenewei_(pset.getParameter<double>("maxDisToSeedperthickperlayerenewei")),
111  nintDisToSeedperthickperlayerenewei_(pset.getParameter<int>("nintDisToSeedperthickperlayerenewei")),
112 
113  //Parameters for the distance of cluster cells to max cell per thickness per layer
114  minDisToMaxperthickperlayer_(pset.getParameter<double>("minDisToMaxperthickperlayer")),
115  maxDisToMaxperthickperlayer_(pset.getParameter<double>("maxDisToMaxperthickperlayer")),
116  nintDisToMaxperthickperlayer_(pset.getParameter<int>("nintDisToMaxperthickperlayer")),
117 
118  //Parameters for the energy weighted distance of cluster cells to max cell per thickness per layer
119  minDisToMaxperthickperlayerenewei_(pset.getParameter<double>("minDisToMaxperthickperlayerenewei")),
120  maxDisToMaxperthickperlayerenewei_(pset.getParameter<double>("maxDisToMaxperthickperlayerenewei")),
121  nintDisToMaxperthickperlayerenewei_(pset.getParameter<int>("nintDisToMaxperthickperlayerenewei")),
122 
123  //Parameters for the distance of seed cell to max cell per thickness per layer
124  minDisSeedToMaxperthickperlayer_(pset.getParameter<double>("minDisSeedToMaxperthickperlayer")),
125  maxDisSeedToMaxperthickperlayer_(pset.getParameter<double>("maxDisSeedToMaxperthickperlayer")),
126  nintDisSeedToMaxperthickperlayer_(pset.getParameter<int>("nintDisSeedToMaxperthickperlayer")),
127 
128  //Parameters for the energy of a cluster per thickness per layer
129  minClEneperthickperlayer_(pset.getParameter<double>("minClEneperthickperlayer")),
130  maxClEneperthickperlayer_(pset.getParameter<double>("maxClEneperthickperlayer")),
131  nintClEneperthickperlayer_(pset.getParameter<int>("nintClEneperthickperlayer")),
132 
133  //Parameters for the energy density of cluster cells per thickness
134  minCellsEneDensperthick_(pset.getParameter<double>("minCellsEneDensperthick")),
135  maxCellsEneDensperthick_(pset.getParameter<double>("maxCellsEneDensperthick")),
136  nintCellsEneDensperthick_(pset.getParameter<int>("nintCellsEneDensperthick")),
137 
138  //Parameters for the total number of multiclusters per event
 139  //We always treat one event as two events, one in +z one in -z
140  minTotNMCLs_(pset.getParameter<double>("minTotNMCLs")),
141  maxTotNMCLs_(pset.getParameter<double>("maxTotNMCLs")),
142  nintTotNMCLs_(pset.getParameter<int>("nintTotNMCLs")),
143 
144  //Parameters for the total number of layer clusters in multicluster
145  minTotNClsinMCLs_(pset.getParameter<double>("minTotNClsinMCLs")),
146  maxTotNClsinMCLs_(pset.getParameter<double>("maxTotNClsinMCLs")),
147  nintTotNClsinMCLs_(pset.getParameter<int>("nintTotNClsinMCLs")),
148 
149  //Parameters for the total number of layer clusters in multicluster per layer
150  minTotNClsinMCLsperlayer_(pset.getParameter<double>("minTotNClsinMCLsperlayer")),
151  maxTotNClsinMCLsperlayer_(pset.getParameter<double>("maxTotNClsinMCLsperlayer")),
152  nintTotNClsinMCLsperlayer_(pset.getParameter<int>("nintTotNClsinMCLsperlayer")),
153 
154  //Parameters for the multiplicity of layer clusters in multicluster
155  minMplofLCs_(pset.getParameter<double>("minMplofLCs")),
156  maxMplofLCs_(pset.getParameter<double>("maxMplofLCs")),
157  nintMplofLCs_(pset.getParameter<int>("nintMplofLCs")),
158 
159  //Parameters for cluster size
160  minSizeCLsinMCLs_(pset.getParameter<double>("minSizeCLsinMCLs")),
161  maxSizeCLsinMCLs_(pset.getParameter<double>("maxSizeCLsinMCLs")),
162  nintSizeCLsinMCLs_(pset.getParameter<int>("nintSizeCLsinMCLs")),
163 
164  //Parameters for the energy of a cluster per thickness per layer
165  minClEnepermultiplicity_(pset.getParameter<double>("minClEnepermultiplicity")),
166  maxClEnepermultiplicity_(pset.getParameter<double>("maxClEnepermultiplicity")),
167  nintClEnepermultiplicity_(pset.getParameter<int>("nintClEnepermultiplicity")),
168 
169  //parameters for x
170  minX_(pset.getParameter<double>("minX")),
171  maxX_(pset.getParameter<double>("maxX")),
172  nintX_(pset.getParameter<int>("nintX")),
173 
174  //parameters for y
175  minY_(pset.getParameter<double>("minY")),
176  maxY_(pset.getParameter<double>("maxY")),
177  nintY_(pset.getParameter<int>("nintY")),
178 
179  //parameters for z
180  minZ_(pset.getParameter<double>("minZ")),
181  maxZ_(pset.getParameter<double>("maxZ")),
182  nintZ_(pset.getParameter<int>("nintZ")) {}
183 
185 
187  histograms.lastLayerEEzm = ibook.bookInt("lastLayerEEzm");
188  histograms.lastLayerFHzm = ibook.bookInt("lastLayerFHzm");
189  histograms.maxlayerzm = ibook.bookInt("maxlayerzm");
190  histograms.lastLayerEEzp = ibook.bookInt("lastLayerEEzp");
191  histograms.lastLayerFHzp = ibook.bookInt("lastLayerFHzp");
192  histograms.maxlayerzp = ibook.bookInt("maxlayerzp");
193 }
194 
196  histograms.h_caloparticle_eta[pdgid] =
197  ibook.book1D("num_caloparticle_eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_);
198  histograms.h_caloparticle_eta_Zorigin[pdgid] =
199  ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_);
200 
201  histograms.h_caloparticle_energy[pdgid] =
202  ibook.book1D("caloparticle_energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_);
203  histograms.h_caloparticle_pt[pdgid] = ibook.book1D("caloparticle_pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_);
204  histograms.h_caloparticle_phi[pdgid] =
205  ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_);
206 }
207 
210  unsigned layers,
211  std::vector<int> thicknesses,
212  std::string pathtomatbudfile) {
213  //---------------------------------------------------------------------------------------------------------------------------
214  histograms.h_cluster_eta.push_back(
215  ibook.book1D("num_reco_cluster_eta", "N of reco clusters vs eta", nintEta_, minEta_, maxEta_));
216 
217  //---------------------------------------------------------------------------------------------------------------------------
218  //z-
219  histograms.h_mixedhitscluster_zminus.push_back(
220  ibook.book1D("mixedhitscluster_zminus",
221  "N of reco clusters that contain hits of more than one kind in z-",
225  //z+
226  histograms.h_mixedhitscluster_zplus.push_back(
227  ibook.book1D("mixedhitscluster_zplus",
228  "N of reco clusters that contain hits of more than one kind in z+",
232 
233  //---------------------------------------------------------------------------------------------------------------------------
234  //z-
235  histograms.h_energyclustered_zminus.push_back(
236  ibook.book1D("energyclustered_zminus",
237  "percent of total energy clustered by all layer clusters over caloparticles energy in z-",
238  nintEneCl_,
239  minEneCl_,
240  maxEneCl_));
241  //z+
242  histograms.h_energyclustered_zplus.push_back(
243  ibook.book1D("energyclustered_zplus",
244  "percent of total energy clustered by all layer clusters over caloparticles energy in z+",
245  nintEneCl_,
246  minEneCl_,
247  maxEneCl_));
248 
249  //---------------------------------------------------------------------------------------------------------------------------
250  //z-
251  std::string subpathtomat = pathtomatbudfile.substr(pathtomatbudfile.find("Validation"));
252  histograms.h_longdepthbarycentre_zminus.push_back(
253  ibook.book1D("longdepthbarycentre_zminus",
254  "The longitudinal depth barycentre in z- for " + subpathtomat,
257  maxLongDepBary_));
258  //z+
259  histograms.h_longdepthbarycentre_zplus.push_back(
260  ibook.book1D("longdepthbarycentre_zplus",
261  "The longitudinal depth barycentre in z+ for " + subpathtomat,
264  maxLongDepBary_));
265 
266  //---------------------------------------------------------------------------------------------------------------------------
267  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
268  auto istr1 = std::to_string(ilayer);
269  while (istr1.size() < 2) {
270  istr1.insert(0, "0");
271  }
 272  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
273  std::string istr2 = "";
274  //First with the -z endcap
275  if (ilayer < layers) {
276  istr2 = std::to_string(ilayer + 1) + " in z-";
277  } else { //Then for the +z
278  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
279  }
280  histograms.h_clusternum_perlayer[ilayer] = ibook.book1D("totclusternum_layer_" + istr1,
281  "total number of layer clusters for layer " + istr2,
285  histograms.h_energyclustered_perlayer[ilayer] =
286  ibook.book1D("energyclustered_perlayer" + istr1,
287  "percent of total energy clustered by layer clusters over caloparticles energy for layer " + istr2,
291  histograms.h_score_layercl2caloparticle_perlayer[ilayer] =
292  ibook.book1D("Score_layercl2caloparticle_perlayer" + istr1,
293  "Score of Layer Cluster per CaloParticle for layer " + istr2,
294  nintScore_,
295  minScore_,
296  maxScore_);
297  histograms.h_score_caloparticle2layercl_perlayer[ilayer] =
298  ibook.book1D("Score_caloparticle2layercl_perlayer" + istr1,
299  "Score of CaloParticle per Layer Cluster for layer " + istr2,
300  nintScore_,
301  minScore_,
302  maxScore_);
304  ibook.book2D("Energy_vs_Score_caloparticle2layer_perlayer" + istr1,
305  "Energy vs Score of CaloParticle per Layer Cluster for layer " + istr2,
306  nintScore_,
307  minScore_,
308  maxScore_,
313  ibook.book2D("Energy_vs_Score_layer2caloparticle_perlayer" + istr1,
314  "Energy vs Score of Layer Cluster per CaloParticle Layer for layer " + istr2,
315  nintScore_,
316  minScore_,
317  maxScore_,
322  ibook.book1D("SharedEnergy_caloparticle2layercl_perlayer" + istr1,
323  "Shared Energy of CaloParticle per Layer Cluster for layer " + istr2,
328  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_eta_perlayer" + istr1,
329  "Shared Energy of CaloParticle vs #eta per best Layer Cluster for layer " + istr2,
330  nintEta_,
331  minEta_,
332  maxEta_,
336  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_phi_perlayer" + istr1,
337  "Shared Energy of CaloParticle vs #phi per best Layer Cluster for layer " + istr2,
338  nintPhi_,
339  minPhi_,
340  maxPhi_,
344  ibook.book1D("SharedEnergy_layercluster2caloparticle_perlayer" + istr1,
345  "Shared Energy of Layer Cluster per Layer Calo Particle for layer " + istr2,
350  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_eta_perlayer" + istr1,
351  "Shared Energy of LayerCluster vs #eta per best Calo Particle for layer " + istr2,
352  nintEta_,
353  minEta_,
354  maxEta_,
358  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_phi_perlayer" + istr1,
359  "Shared Energy of LayerCluster vs #phi per best Calo Particle for layer " + istr2,
360  nintPhi_,
361  minPhi_,
362  maxPhi_,
365  histograms.h_num_caloparticle_eta_perlayer[ilayer] =
366  ibook.book1D("Num_CaloParticle_Eta_perlayer" + istr1,
367  "Num CaloParticle Eta per Layer Cluster for layer " + istr2,
368  nintEta_,
369  minEta_,
370  maxEta_);
371  histograms.h_numDup_caloparticle_eta_perlayer[ilayer] =
372  ibook.book1D("NumDup_CaloParticle_Eta_perlayer" + istr1,
373  "Num Duplicate CaloParticle Eta per Layer Cluster for layer " + istr2,
374  nintEta_,
375  minEta_,
376  maxEta_);
377  histograms.h_denom_caloparticle_eta_perlayer[ilayer] =
378  ibook.book1D("Denom_CaloParticle_Eta_perlayer" + istr1,
379  "Denom CaloParticle Eta per Layer Cluster for layer " + istr2,
380  nintEta_,
381  minEta_,
382  maxEta_);
383  histograms.h_num_caloparticle_phi_perlayer[ilayer] =
384  ibook.book1D("Num_CaloParticle_Phi_perlayer" + istr1,
385  "Num CaloParticle Phi per Layer Cluster for layer " + istr2,
386  nintPhi_,
387  minPhi_,
388  maxPhi_);
389  histograms.h_numDup_caloparticle_phi_perlayer[ilayer] =
390  ibook.book1D("NumDup_CaloParticle_Phi_perlayer" + istr1,
391  "Num Duplicate CaloParticle Phi per Layer Cluster for layer " + istr2,
392  nintPhi_,
393  minPhi_,
394  maxPhi_);
395  histograms.h_denom_caloparticle_phi_perlayer[ilayer] =
396  ibook.book1D("Denom_CaloParticle_Phi_perlayer" + istr1,
397  "Denom CaloParticle Phi per Layer Cluster for layer " + istr2,
398  nintPhi_,
399  minPhi_,
400  maxPhi_);
401  histograms.h_num_layercl_eta_perlayer[ilayer] =
402  ibook.book1D("Num_LayerCluster_Eta_perlayer" + istr1,
403  "Num LayerCluster Eta per Layer Cluster for layer " + istr2,
404  nintEta_,
405  minEta_,
406  maxEta_);
407  histograms.h_numMerge_layercl_eta_perlayer[ilayer] =
408  ibook.book1D("NumMerge_LayerCluster_Eta_perlayer" + istr1,
409  "Num Merge LayerCluster Eta per Layer Cluster for layer " + istr2,
410  nintEta_,
411  minEta_,
412  maxEta_);
413  histograms.h_denom_layercl_eta_perlayer[ilayer] =
414  ibook.book1D("Denom_LayerCluster_Eta_perlayer" + istr1,
415  "Denom LayerCluster Eta per Layer Cluster for layer " + istr2,
416  nintEta_,
417  minEta_,
418  maxEta_);
419  histograms.h_num_layercl_phi_perlayer[ilayer] =
420  ibook.book1D("Num_LayerCluster_Phi_perlayer" + istr1,
421  "Num LayerCluster Phi per Layer Cluster for layer " + istr2,
422  nintPhi_,
423  minPhi_,
424  maxPhi_);
425  histograms.h_numMerge_layercl_phi_perlayer[ilayer] =
426  ibook.book1D("NumMerge_LayerCluster_Phi_perlayer" + istr1,
427  "Num Merge LayerCluster Phi per Layer Cluster for layer " + istr2,
428  nintPhi_,
429  minPhi_,
430  maxPhi_);
431  histograms.h_denom_layercl_phi_perlayer[ilayer] =
432  ibook.book1D("Denom_LayerCluster_Phi_perlayer" + istr1,
433  "Denom LayerCluster Phi per Layer Cluster for layer " + istr2,
434  nintPhi_,
435  minPhi_,
436  maxPhi_);
437  histograms.h_cellAssociation_perlayer[ilayer] =
438  ibook.book1D("cellAssociation_perlayer" + istr1, "Cell Association for layer " + istr2, 5, -4., 1.);
439  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(2, "TN(purity)");
440  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(3, "FN(ineff.)");
441  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(4, "FP(fake)");
442  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(5, "TP(eff.)");
443  }
444 
445  //---------------------------------------------------------------------------------------------------------------------------
446  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
447  auto istr = std::to_string(*it);
448  histograms.h_clusternum_perthick[(*it)] = ibook.book1D("totclusternum_thick_" + istr,
449  "total number of layer clusters for thickness " + istr,
453  //---
454  histograms.h_cellsenedens_perthick[(*it)] = ibook.book1D("cellsenedens_thick_" + istr,
455  "energy density of cluster cells for thickness " + istr,
459  }
460 
461  //---------------------------------------------------------------------------------------------------------------------------
 462  //Not all combinations exist, but we should keep them all for cross-checking reasons.
463  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
464  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
465  auto istr1 = std::to_string(*it);
466  auto istr2 = std::to_string(ilayer);
467  while (istr2.size() < 2)
468  istr2.insert(0, "0");
469  auto istr = istr1 + "_" + istr2;
 470  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
471  std::string istr3 = "";
472  //First with the -z endcap
473  if (ilayer < layers) {
474  istr3 = std::to_string(ilayer + 1) + " in z- ";
475  } else { //Then for the +z
476  istr3 = std::to_string(ilayer - (layers - 1)) + " in z+ ";
477  }
478  //---
479  histograms.h_cellsnum_perthickperlayer[istr] =
480  ibook.book1D("cellsnum_perthick_perlayer_" + istr,
481  "total number of cells for layer " + istr3 + " for thickness " + istr1,
485  //---
486  histograms.h_distancetoseedcell_perthickperlayer[istr] =
487  ibook.book1D("distancetoseedcell_perthickperlayer_" + istr,
488  "distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
492  //---
494  "distancetoseedcell_perthickperlayer_eneweighted_" + istr,
495  "energy weighted distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
499  //---
500  histograms.h_distancetomaxcell_perthickperlayer[istr] =
501  ibook.book1D("distancetomaxcell_perthickperlayer_" + istr,
502  "distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
506  //---
508  "distancetomaxcell_perthickperlayer_eneweighted_" + istr,
509  "energy weighted distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
513  //---
515  ibook.book1D("distancebetseedandmaxcell_perthickperlayer_" + istr,
516  "distance of seed cell to max cell for layer " + istr3 + " for thickness " + istr1,
520  //---
522  "distancebetseedandmaxcellvsclusterenergy_perthickperlayer_" + istr,
523  "distance of seed cell to max cell vs cluster energy for layer " + istr3 + " for thickness " + istr1,
530  }
531  }
532  //---------------------------------------------------------------------------------------------------------------------------
533 }
534 
536  histograms.h_score_multicl2caloparticle.push_back(ibook.book1D(
537  "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_));
538  histograms.h_score_caloparticle2multicl.push_back(ibook.book1D(
539  "Score_caloparticle2multicl", "Score of CaloParticle per Multi Cluster", nintScore_, minScore_, maxScore_));
540  histograms.h_energy_vs_score_multicl2caloparticle.push_back(
541  ibook.book2D("Energy_vs_Score_multi2caloparticle",
542  "Energy vs Score of Multi Cluster per CaloParticle",
543  nintScore_,
544  minScore_,
545  maxScore_,
549  histograms.h_energy_vs_score_caloparticle2multicl.push_back(
550  ibook.book2D("Energy_vs_Score_caloparticle2multi",
551  "Energy vs Score of CaloParticle per Multi Cluster",
552  nintScore_,
553  minScore_,
554  maxScore_,
558 
559  //back to all multiclusters
560  histograms.h_num_multicl_eta.push_back(
561  ibook.book1D("Num_MultiCluster_Eta", "Num MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
562  histograms.h_numMerge_multicl_eta.push_back(ibook.book1D(
563  "NumMerge_MultiCluster_Eta", "Num Merge MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
564  histograms.h_denom_multicl_eta.push_back(
565  ibook.book1D("Denom_MultiCluster_Eta", "Denom MultiCluster Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
566  histograms.h_num_multicl_phi.push_back(
567  ibook.book1D("Num_MultiCluster_Phi", "Num MultiCluster Phi per Multi Cluster ", nintPhi_, minPhi_, maxPhi_));
568  histograms.h_numMerge_multicl_phi.push_back(ibook.book1D(
569  "NumMerge_MultiCluster_Phi", "Num Merge MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
570  histograms.h_denom_multicl_phi.push_back(
571  ibook.book1D("Denom_MultiCluster_Phi", "Denom MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
572  histograms.h_sharedenergy_multicl2caloparticle.push_back(
573  ibook.book1D("SharedEnergy_multicluster2caloparticle",
574  "Shared Energy of Multi Cluster per Calo Particle in each layer",
578  histograms.h_sharedenergy_multicl2caloparticle_vs_eta.push_back(
579  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_eta",
580  "Shared Energy of MultiCluster vs #eta per best Calo Particle in each layer",
581  nintEta_,
582  minEta_,
583  maxEta_,
586  histograms.h_sharedenergy_multicl2caloparticle_vs_phi.push_back(
587  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_phi",
588  "Shared Energy of MultiCluster vs #phi per best Calo Particle in each layer",
589  nintPhi_,
590  minPhi_,
591  maxPhi_,
594  histograms.h_sharedenergy_caloparticle2multicl.push_back(
595  ibook.book1D("SharedEnergy_caloparticle2multicl",
596  "Shared Energy of CaloParticle per Multi Cluster",
600  histograms.h_sharedenergy_caloparticle2multicl_vs_eta.push_back(
601  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_eta",
602  "Shared Energy of CaloParticle vs #eta per best Multi Cluster",
603  nintEta_,
604  minEta_,
605  maxEta_,
608  histograms.h_sharedenergy_caloparticle2multicl_vs_phi.push_back(
609  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_phi",
610  "Shared Energy of CaloParticle vs #phi per best Multi Cluster",
611  nintPhi_,
612  minPhi_,
613  maxPhi_,
616  histograms.h_num_caloparticle_eta.push_back(
617  ibook.book1D("Num_CaloParticle_Eta", "Num CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
618  histograms.h_numDup_multicl_eta.push_back(
619  ibook.book1D("NumDup_MultiCluster_Eta", "Num Duplicate MultiCl vs Eta", nintEta_, minEta_, maxEta_));
620  histograms.h_denom_caloparticle_eta.push_back(
621  ibook.book1D("Denom_CaloParticle_Eta", "Denom CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
622  histograms.h_num_caloparticle_phi.push_back(
623  ibook.book1D("Num_CaloParticle_Phi", "Num CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
624  histograms.h_numDup_multicl_phi.push_back(
625  ibook.book1D("NumDup_MultiCluster_Phi", "Num Duplicate MultiCl vs Phi", nintPhi_, minPhi_, maxPhi_));
626  histograms.h_denom_caloparticle_phi.push_back(
627  ibook.book1D("Denom_CaloParticle_Phi", "Denom CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
628 
629  std::unordered_map<int, dqm::reco::MonitorElement*> clusternum_in_multicluster_perlayer;
630  clusternum_in_multicluster_perlayer.clear();
631 
632  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
633  auto istr1 = std::to_string(ilayer);
634  while (istr1.size() < 2) {
635  istr1.insert(0, "0");
636  }
 637  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
638  std::string istr2 = "";
639  //First with the -z endcap
640  if (ilayer < layers) {
641  istr2 = std::to_string(ilayer + 1) + " in z-";
642  } else { //Then for the +z
643  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
644  }
645 
646  clusternum_in_multicluster_perlayer[ilayer] =
647  ibook.book1D("clusternum_in_multicluster_perlayer" + istr1,
648  "Number of layer clusters in multicluster for layer " + istr2,
652  }
653 
654  histograms.h_clusternum_in_multicluster_perlayer.push_back(std::move(clusternum_in_multicluster_perlayer));
655 
656  histograms.h_multiclusternum.push_back(
657  ibook.book1D("totmulticlusternum", "total number of multiclusters", nintTotNMCLs_, minTotNMCLs_, maxTotNMCLs_));
658 
659  histograms.h_contmulticlusternum.push_back(ibook.book1D("contmulticlusternum",
660  "number of multiclusters with 3 contiguous layers",
662  minTotNMCLs_,
663  maxTotNMCLs_));
664 
665  histograms.h_noncontmulticlusternum.push_back(ibook.book1D("noncontmulticlusternum",
666  "number of multiclusters without 3 contiguous layers",
668  minTotNMCLs_,
669  maxTotNMCLs_));
670 
671  histograms.h_clusternum_in_multicluster.push_back(ibook.book1D("clusternum_in_multicluster",
672  "total number of layer clusters in multicluster",
676 
677  histograms.h_clusternum_in_multicluster_vs_layer.push_back(
678  ibook.bookProfile("clusternum_in_multicluster_vs_layer",
679  "Profile of 2d layer clusters in multicluster vs layer number",
680  2 * layers,
681  0.,
682  2. * layers,
685 
686  histograms.h_multiplicityOfLCinMCL.push_back(ibook.book2D("multiplicityOfLCinMCL",
687  "Multiplicity vs Layer cluster size in Multiclusters",
689  minMplofLCs_,
690  maxMplofLCs_,
694 
695  histograms.h_multiplicity_numberOfEventsHistogram.push_back(ibook.book1D("multiplicity_numberOfEventsHistogram",
696  "multiplicity numberOfEventsHistogram",
698  minMplofLCs_,
699  maxMplofLCs_));
700 
702  ibook.book1D("multiplicity_zminus_numberOfEventsHistogram",
703  "multiplicity numberOfEventsHistogram in z-",
705  minMplofLCs_,
706  maxMplofLCs_));
707 
709  ibook.book1D("multiplicity_zplus_numberOfEventsHistogram",
710  "multiplicity numberOfEventsHistogram in z+",
712  minMplofLCs_,
713  maxMplofLCs_));
714 
716  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zminus",
717  "Multiplicity vs Layer number in z-",
719  minMplofLCs_,
720  maxMplofLCs_,
721  layers,
722  0.,
723  (float)layers));
724 
726  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zplus",
727  "Multiplicity vs Layer number in z+",
729  minMplofLCs_,
730  maxMplofLCs_,
731  layers,
732  0.,
733  (float)layers));
734 
736  ibook.book2D("multiplicityOfLCinMCL_vs_layerclusterenergy",
737  "Multiplicity vs Layer cluster energy",
739  minMplofLCs_,
740  maxMplofLCs_,
744 
745  histograms.h_multicluster_pt.push_back(
746  ibook.book1D("multicluster_pt", "Pt of the multicluster", nintPt_, minPt_, maxPt_));
747  histograms.h_multicluster_eta.push_back(
748  ibook.book1D("multicluster_eta", "Eta of the multicluster", nintEta_, minEta_, maxEta_));
749  histograms.h_multicluster_phi.push_back(
750  ibook.book1D("multicluster_phi", "Phi of the multicluster", nintPhi_, minPhi_, maxPhi_));
751  histograms.h_multicluster_energy.push_back(
752  ibook.book1D("multicluster_energy", "Energy of the multicluster", nintEne_, minEne_, maxEne_));
753  histograms.h_multicluster_x.push_back(
754  ibook.book1D("multicluster_x", "X position of the multicluster", nintX_, minX_, maxX_));
755  histograms.h_multicluster_y.push_back(
756  ibook.book1D("multicluster_y", "Y position of the multicluster", nintY_, minY_, maxY_));
757  histograms.h_multicluster_z.push_back(
758  ibook.book1D("multicluster_z", "Z position of the multicluster", nintZ_, minZ_, maxZ_));
759  histograms.h_multicluster_firstlayer.push_back(
760  ibook.book1D("multicluster_firstlayer", "First layer of the multicluster", 2 * layers, 0., (float)2 * layers));
761  histograms.h_multicluster_lastlayer.push_back(
762  ibook.book1D("multicluster_lastlayer", "Last layer of the multicluster", 2 * layers, 0., (float)2 * layers));
763  histograms.h_multicluster_layersnum.push_back(ibook.book1D(
764  "multicluster_layersnum", "Number of layers of the multicluster", 2 * layers, 0., (float)2 * layers));
765 }
766 
768  //We will save some info straight from geometry to avoid mistakes from updates
769  //----------- TODO ----------------------------------------------------------
770  //For now values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the one expected.
771  //Will come back to this when there will be info in CMSSW to put in DQM file.
772  histograms.lastLayerEEzm->Fill(recHitTools_->lastLayerEE());
773  histograms.lastLayerFHzm->Fill(recHitTools_->lastLayerFH());
774  histograms.maxlayerzm->Fill(layers);
775  histograms.lastLayerEEzp->Fill(recHitTools_->lastLayerEE() + layers);
776  histograms.lastLayerFHzp->Fill(recHitTools_->lastLayerFH() + layers);
777  histograms.maxlayerzp->Fill(layers + layers);
778 }
779 
781  int pdgid,
782  const CaloParticle& caloparticle,
783  std::vector<SimVertex> const& simVertices) const {
// Fill the CaloParticle kinematic histograms (eta, z-origin vs eta, energy,
// pt, phi) for this particle's PDG id. The histogram maps are keyed by pdgid,
// so ids with no booked histogram are silently skipped via the count() guards.
784  const auto eta = getEta(caloparticle.eta());
785  if (histograms.h_caloparticle_eta.count(pdgid)) {
786  histograms.h_caloparticle_eta.at(pdgid)->Fill(eta);
787  }
788  if (histograms.h_caloparticle_eta_Zorigin.count(pdgid)) {
// z of the production vertex of the CaloParticle's first G4 track vs eta.
// NOTE(review): assumes g4Tracks() is non-empty and vertIndex() is a valid
// index into simVertices — confirm upstream selection guarantees this.
789  histograms.h_caloparticle_eta_Zorigin.at(pdgid)->Fill(
790  simVertices.at(caloparticle.g4Tracks()[0].vertIndex()).position().z(), eta);
791  }
792 
793  if (histograms.h_caloparticle_energy.count(pdgid)) {
794  histograms.h_caloparticle_energy.at(pdgid)->Fill(caloparticle.energy());
795  }
796  if (histograms.h_caloparticle_pt.count(pdgid)) {
797  histograms.h_caloparticle_pt.at(pdgid)->Fill(caloparticle.pt());
798  }
799  if (histograms.h_caloparticle_phi.count(pdgid)) {
800  histograms.h_caloparticle_phi.at(pdgid)->Fill(caloparticle.phi());
801  }
802 }
803 
805  int count,
806  const reco::CaloCluster& cluster) const {
// Fill the per-collection (indexed by 'count') cluster eta histogram for a
// single CaloCluster; getEta() applies the configured |eta| folding if enabled.
807  const auto eta = getEta(cluster.eta());
808  histograms.h_cluster_eta[count]->Fill(eta);
809 }
810 
813  std::vector<CaloParticle> const& cP,
814  std::vector<size_t> const& cPIndices,
815  std::map<DetId, const HGCRecHit*> const& hitMap,
816  unsigned layers) const {
// Build the hit-level association between reconstructed LayerClusters (LC) and
// simulated CaloParticles (CP), compute an energy-weighted quadratic score in
// both directions (LC->CP and CP->LC), and fill the corresponding score,
// shared-energy and efficiency/duplicate/merge histograms per layer.
// Layers are indexed 0..layers-1 for -z and layers..2*layers-1 for +z.
817  auto nLayerClusters = clusters.size();
818  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
819  auto nCaloParticles = cPIndices.size();
820 
// DetId -> list of (cluster id, fraction) links; one map keyed towards
// CaloParticles and one towards LayerClusters.
821  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
822  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToLayerClusterId_Map;
823 
824  // this contains the ids of the caloparticles contributing with at least one hit to the layer cluster and the reconstruction error
825  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInLayerCluster;
826  cpsInLayerCluster.resize(nLayerClusters);
827 
// Per-CaloParticle, per-layer accumulator: simhit energy on the layer, the
// (compressed) hits-and-fractions, and the links to LayerClusters.
828  std::vector<std::vector<caloParticleOnLayer>> cPOnLayer;
829  cPOnLayer.resize(nCaloParticles);
830  for (unsigned int i = 0; i < nCaloParticles; ++i) {
831  cPOnLayer[i].resize(layers * 2);
832  for (unsigned int j = 0; j < layers * 2; ++j) {
833  cPOnLayer[i][j].caloParticleId = i;
834  cPOnLayer[i][j].energy = 0.f;
835  cPOnLayer[i][j].hits_and_fractions.clear();
836  }
837  }
838 
// First pass: loop over the SimClusters of each selected CaloParticle and, for
// every simhit that has a matching RecHit, accumulate its energy and fraction
// into cPOnLayer and record the CP link for that DetId.
839  for (const auto& cpId : cPIndices) {
840  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
841  for (const auto& it_sc : simClusterRefVector) {
842  const SimCluster& simCluster = (*(it_sc));
843  const auto& hits_and_fractions = simCluster.hits_and_fractions();
844  for (const auto& it_haf : hits_and_fractions) {
845  DetId hitid = (it_haf.first);
846  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
847  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
848  if (itcheck != hitMap.end()) {
849  const HGCRecHit* hit = itcheck->second;
850  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
851  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
852  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
853  detIdToCaloParticleId_Map[hitid].emplace_back(
854  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
855  } else {
// Same DetId seen again: if this CP already has an entry, sum the fractions
// (a CP can reach the same cell through several SimClusters).
856  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
857  detIdToCaloParticleId_Map[hitid].end(),
858  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
859  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
860  findHitIt->fraction += it_haf.second;
861  } else {
862  detIdToCaloParticleId_Map[hitid].emplace_back(
863  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
864  }
865  }
866  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
867  // We need to compress the hits and fractions in order to have a
868  // reasonable score between CP and LC. Imagine, for example, that a
869  // CP has detID X used by 2 SimClusters with different fractions. If
870  // a single LC uses X with fraction 1 and is compared to the 2
871  // contributions separately, it will be assigned a score != 0, which
872  // is wrong.
873  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
874  auto found = std::find_if(
875  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
876  if (found != haf.end()) {
877  found->second += it_haf.second;
878  } else {
879  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
880  }
881  }
882  }
883  }
884  }
885 
// Debug dump of the CP-side accumulators built in the first pass.
886  LogDebug("HGCalValidator") << "cPOnLayer INFO" << std::endl;
887  for (size_t cp = 0; cp < cPOnLayer.size(); ++cp) {
888  LogDebug("HGCalValidator") << "For CaloParticle Idx: " << cp << " we have: " << std::endl;
889  for (size_t cpp = 0; cpp < cPOnLayer[cp].size(); ++cpp) {
890  LogDebug("HGCalValidator") << " On Layer: " << cpp << " we have:" << std::endl;
891  LogDebug("HGCalValidator") << " CaloParticleIdx: " << cPOnLayer[cp][cpp].caloParticleId << std::endl;
892  LogDebug("HGCalValidator") << " Energy: " << cPOnLayer[cp][cpp].energy << std::endl;
893  double tot_energy = 0.;
894  for (auto const& haf : cPOnLayer[cp][cpp].hits_and_fractions) {
895  LogDebug("HGCalValidator") << " Hits/fraction/energy: " << (uint32_t)haf.first << "/" << haf.second << "/"
896  << haf.second * hitMap.at(haf.first)->energy() << std::endl;
897  tot_energy += haf.second * hitMap.at(haf.first)->energy();
898  }
899  LogDebug("HGCalValidator") << " Tot Sum haf: " << tot_energy << std::endl;
900  for (auto const& lc : cPOnLayer[cp][cpp].layerClusterIdToEnergyAndScore) {
901  LogDebug("HGCalValidator") << " lcIdx/energy/score: " << lc.first << "/" << lc.second.first << "/"
902  << lc.second.second << std::endl;
903  }
904  }
905  }
906 
907  LogDebug("HGCalValidator") << "detIdToCaloParticleId_Map INFO" << std::endl;
908  for (auto const& cp : detIdToCaloParticleId_Map) {
909  LogDebug("HGCalValidator") << "For detId: " << (uint32_t)cp.first
910  << " we have found the following connections with CaloParticles:" << std::endl;
911  for (auto const& cpp : cp.second) {
912  LogDebug("HGCalValidator") << " CaloParticle Id: " << cpp.clusterId << " with fraction: " << cpp.fraction
913  << " and energy: " << cpp.fraction * hitMap.at(cp.first)->energy() << std::endl;
914  }
915  }
916 
// Second pass: loop over LayerClusters, classify each of their hits (noise,
// halo, or linked to a CP), record the LC link for every DetId, and accumulate
// the shared energy of each (CP, LC) pair on the cluster's layer.
917  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
918  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
919  unsigned int numberOfHitsInLC = hits_and_fractions.size();
920 
921  // This vector will store, for each hit in the Layercluster, the index of
922  // the CaloParticle that contributed the most, in terms of energy, to it.
923  // Special values are:
924  //
925  // -2 --> the reconstruction fraction of the RecHit is 0 (used in the past to monitor Halo Hits)
926  // -3 --> same as before with the added condition that no CaloParticle has been linked to this RecHit
927  // -1 --> the reco fraction is >0, but no CaloParticle has been linked to it
928  // >=0 --> index of the linked CaloParticle
929  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
// Layer is determined from the first hit; assumes all hits of an LC sit on
// one layer, and that the LC has at least one hit.
930  const auto firstHitDetId = hits_and_fractions[0].first;
931  int lcLayerId =
932  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
933 
934  // This will store the index of the CaloParticle linked to the LayerCluster that has the most number of hits in common.
935  int maxCPId_byNumberOfHits = -1;
936  // This will store the maximum number of shared hits between a Layercluster and a CaloParticle
937  unsigned int maxCPNumberOfHitsInLC = 0;
938  // This will store the index of the CaloParticle linked to the LayerCluster that has the most energy in common.
939  //
940  int maxCPId_byEnergy = -1;
941  // This will store the maximum amount of shared energy between a Layercluster and a CaloParticle
942  float maxEnergySharedLCandCP = 0.f;
943  // This will store the fraction of the LayerCluster energy shared with the best(energy) CaloParticle: e_shared/lc_energy
944  float energyFractionOfLCinCP = 0.f;
945  // This will store the fraction of the CaloParticle energy shared with the LayerCluster: e_shared/cp_energy
946  float energyFractionOfCPinLC = 0.f;
947  std::unordered_map<unsigned, unsigned> occurrencesCPinLC;
948  std::unordered_map<unsigned, float> CPEnergyInLC;
949  unsigned int numberOfNoiseHitsInLC = 0;
950  unsigned int numberOfHaloHitsInLC = 0;
951 
952  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
953  DetId rh_detid = hits_and_fractions[hitId].first;
954  auto rhFraction = hits_and_fractions[hitId].second;
955 
// NOTE(review): the find() result is dereferenced without an end() check —
// relies on every LC hit being present in hitMap; confirm upstream guarantee.
956  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
957  const HGCRecHit* hit = itcheck->second;
958 
959  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(rh_detid);
960  if (hit_find_in_LC == detIdToLayerClusterId_Map.end()) {
961  detIdToLayerClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
962  }
963  detIdToLayerClusterId_Map[rh_detid].emplace_back(HGVHistoProducerAlgo::detIdInfoInCluster{lcId, rhFraction});
964 
965  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
966 
967  // if the fraction is zero or the hit does not belong to any calo
968  // particle, set the caloparticleId for the hit to -1 this will
969  // contribute to the number of noise hits
970 
971  // MR Remove the case in which the fraction is 0, since this could be a
972  // real hit that has been marked as halo.
973  if (rhFraction == 0.) {
974  hitsToCaloParticleId[hitId] = -2;
975  numberOfHaloHitsInLC++;
976  }
977  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
// Unlinked hit: -2 (halo) becomes -3, otherwise 0 becomes -1.
978  hitsToCaloParticleId[hitId] -= 1;
979  } else {
980  auto maxCPEnergyInLC = 0.f;
981  auto maxCPId = -1;
982  for (auto& h : hit_find_in_CP->second) {
983  CPEnergyInLC[h.clusterId] += h.fraction * hit->energy();
984  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[lcId].first += h.fraction * hit->energy();
// Duplicates are appended here and removed by sort+unique in the next pass.
985  cpsInLayerCluster[lcId].emplace_back(std::make_pair<int, float>(h.clusterId, 0.f));
986  // Keep track of which CaloParticle contributed the most, in terms
987  // of energy, to this specific LayerCluster.
988  if (CPEnergyInLC[h.clusterId] > maxCPEnergyInLC) {
989  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
990  maxCPId = h.clusterId;
991  }
992  }
993  hitsToCaloParticleId[hitId] = maxCPId;
994  }
995  histograms.h_cellAssociation_perlayer.at(lcLayerId)->Fill(
996  hitsToCaloParticleId[hitId] > 0. ? 0. : hitsToCaloParticleId[hitId]);
997  } // End loop over hits on a LayerCluster
998 
999  for (auto& c : hitsToCaloParticleId) {
1000  if (c < 0) {
1001  numberOfNoiseHitsInLC++;
1002  } else {
1003  occurrencesCPinLC[c]++;
1004  }
1005  }
1006 
1007  for (auto& c : occurrencesCPinLC) {
1008  if (c.second > maxCPNumberOfHitsInLC) {
1009  maxCPId_byNumberOfHits = c.first;
1010  maxCPNumberOfHitsInLC = c.second;
1011  }
1012  }
1013 
1014  for (auto& c : CPEnergyInLC) {
1015  if (c.second > maxEnergySharedLCandCP) {
1016  maxCPId_byEnergy = c.first;
1017  maxEnergySharedLCandCP = c.second;
1018  }
1019  }
1020  float totalCPEnergyOnLayer = 0.f;
1021  if (maxCPId_byEnergy >= 0) {
1022  totalCPEnergyOnLayer = cPOnLayer[maxCPId_byEnergy][lcLayerId].energy;
1023  energyFractionOfCPinLC = maxEnergySharedLCandCP / totalCPEnergyOnLayer;
1024  if (clusters[lcId].energy() > 0.f) {
1025  energyFractionOfLCinCP = maxEnergySharedLCandCP / clusters[lcId].energy();
1026  }
1027  }
1028  LogDebug("HGCalValidator") << std::setw(10) << "LayerId:"
1029  << "\t" << std::setw(12) << "layerCluster"
1030  << "\t" << std::setw(10) << "lc energy"
1031  << "\t" << std::setw(5) << "nhits"
1032  << "\t" << std::setw(12) << "noise hits"
1033  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
1034  << "\t" << std::setw(8) << "nhitsCP"
1035  << "\t" << std::setw(13) << "maxCPId_byEnergy"
1036  << "\t" << std::setw(20) << "maxEnergySharedLCandCP"
1037  << "\t" << std::setw(22) << "totalCPEnergyOnLayer"
1038  << "\t" << std::setw(22) << "energyFractionOfLCinCP"
1039  << "\t" << std::setw(25) << "energyFractionOfCPinLC"
1040  << "\t"
1041  << "\n";
1042  LogDebug("HGCalValidator") << std::setw(10) << lcLayerId << "\t" << std::setw(12) << lcId << "\t" << std::setw(10)
1043  << clusters[lcId].energy() << "\t" << std::setw(5) << numberOfHitsInLC << "\t"
1044  << std::setw(12) << numberOfNoiseHitsInLC << "\t" << std::setw(22)
1045  << maxCPId_byNumberOfHits << "\t" << std::setw(8) << maxCPNumberOfHitsInLC << "\t"
1046  << std::setw(13) << maxCPId_byEnergy << "\t" << std::setw(20) << maxEnergySharedLCandCP
1047  << "\t" << std::setw(22) << totalCPEnergyOnLayer << "\t" << std::setw(22)
1048  << energyFractionOfLCinCP << "\t" << std::setw(25) << energyFractionOfCPinLC << "\n";
1049  } // End of loop over LayerClusters
1050 
// Debug dump again, now including the LC links accumulated in the second pass.
1051  LogDebug("HGCalValidator") << "Improved cPOnLayer INFO" << std::endl;
1052  for (size_t cp = 0; cp < cPOnLayer.size(); ++cp) {
1053  LogDebug("HGCalValidator") << "For CaloParticle Idx: " << cp << " we have: " << std::endl;
1054  for (size_t cpp = 0; cpp < cPOnLayer[cp].size(); ++cpp) {
1055  LogDebug("HGCalValidator") << " On Layer: " << cpp << " we have:" << std::endl;
1056  LogDebug("HGCalValidator") << " CaloParticleIdx: " << cPOnLayer[cp][cpp].caloParticleId << std::endl;
1057  LogDebug("HGCalValidator") << " Energy: " << cPOnLayer[cp][cpp].energy << std::endl;
1058  double tot_energy = 0.;
1059  for (auto const& haf : cPOnLayer[cp][cpp].hits_and_fractions) {
1060  LogDebug("HGCalValidator") << " Hits/fraction/energy: " << (uint32_t)haf.first << "/" << haf.second << "/"
1061  << haf.second * hitMap.at(haf.first)->energy() << std::endl;
1062  tot_energy += haf.second * hitMap.at(haf.first)->energy();
1063  }
1064  LogDebug("HGCalValidator") << " Tot Sum haf: " << tot_energy << std::endl;
1065  for (auto const& lc : cPOnLayer[cp][cpp].layerClusterIdToEnergyAndScore) {
1066  LogDebug("HGCalValidator") << " lcIdx/energy/score: " << lc.first << "/" << lc.second.first << "/"
1067  << lc.second.second << std::endl;
1068  }
1069  }
1070  }
1071 
1072  LogDebug("HGCalValidator") << "Improved detIdToCaloParticleId_Map INFO" << std::endl;
1073  for (auto const& cp : detIdToCaloParticleId_Map) {
1074  LogDebug("HGCalValidator") << "For detId: " << (uint32_t)cp.first
1075  << " we have found the following connections with CaloParticles:" << std::endl;
1076  for (auto const& cpp : cp.second) {
1077  LogDebug("HGCalValidator") << " CaloParticle Id: " << cpp.clusterId << " with fraction: " << cpp.fraction
1078  << " and energy: " << cpp.fraction * hitMap.at(cp.first)->energy() << std::endl;
1079  }
1080  }
1081 
// Third pass: for each LayerCluster, deduplicate its CP links and compute the
// LC->CP score as the energy-weighted sum of squared fraction differences,
// normalized by the cluster's squared-energy weight; then fill the LC->CP
// score/shared-energy histograms and the efficiency (num/denom) plots using
// ScoreCutLCtoCP_.
1082  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1083  // find the unique caloparticles id contributing to the layer clusters
1084  std::sort(cpsInLayerCluster[lcId].begin(), cpsInLayerCluster[lcId].end());
1085  auto last = std::unique(cpsInLayerCluster[lcId].begin(), cpsInLayerCluster[lcId].end());
1086  cpsInLayerCluster[lcId].erase(last, cpsInLayerCluster[lcId].end());
1087  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1088  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1089  auto firstHitDetId = hits_and_fractions[0].first;
1090  int lcLayerId =
1091  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1092  // If a reconstructed LayerCluster has energy 0 but is linked to a CaloParticle, assigned score 1
1093  if (clusters[lcId].energy() == 0. && !cpsInLayerCluster[lcId].empty()) {
1094  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1095  cpPair.second = 1.;
1096  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first << "\t score \t"
1097  << cpPair.second << "\n";
1098  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1099  }
1100  continue;
1101  }
1102 
1103  // Compute the correct normalization
1104  float invLayerClusterEnergyWeight = 0.f;
1105  for (auto const& haf : clusters[lcId].hitsAndFractions()) {
1106  invLayerClusterEnergyWeight +=
1107  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1108  }
1109  invLayerClusterEnergyWeight = 1.f / invLayerClusterEnergyWeight;
1110 
1111  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
1112  DetId rh_detid = hits_and_fractions[i].first;
1113  float rhFraction = hits_and_fractions[i].second;
1114  bool hitWithNoCP = false;
1115 
1116  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1117  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
1118  hitWithNoCP = true;
1119  auto itcheck = hitMap.find(rh_detid);
1120  const HGCRecHit* hit = itcheck->second;
1121  float hitEnergyWeight = hit->energy() * hit->energy();
1122 
1123  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1124  float cpFraction = 0.f;
1125  if (!hitWithNoCP) {
// detIdInfoInCluster equality compares the clusterId only, so the 0.f
// fraction in the probe is irrelevant to the lookup.
1126  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
1127  detIdToCaloParticleId_Map[rh_detid].end(),
1128  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
1129  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end())
1130  cpFraction = findHitIt->fraction;
1131  }
1132  cpPair.second +=
1133  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invLayerClusterEnergyWeight;
1134  }
1135  } // End of loop over Hits within a LayerCluster
1136 
1137  if (cpsInLayerCluster[lcId].empty())
1138  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\tCP id:\t-1 "
1139  << "\t score \t-1"
1140  << "\n";
1141 
1142  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1143  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first << "\t score \t"
1144  << cpPair.second << "\n";
1145  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1146  auto const& cp_linked = cPOnLayer[cpPair.first][lcLayerId].layerClusterIdToEnergyAndScore[lcId];
1147  histograms.h_sharedenergy_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1148  cp_linked.first / clusters[lcId].energy(), clusters[lcId].energy());
1149  histograms.h_energy_vs_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1150  cpPair.second, cp_linked.first / clusters[lcId].energy());
1151  }
1152 
// An LC is "associated" if at least one CP passes the score cut; more than one
// passing CP flags the LC as a merge candidate.
1153  auto assoc = std::count_if(std::begin(cpsInLayerCluster[lcId]),
1154  std::end(cpsInLayerCluster[lcId]),
1155  [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
1156  if (assoc) {
1157  histograms.h_num_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1158  histograms.h_num_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1159  if (assoc > 1) {
1160  histograms.h_numMerge_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1161  histograms.h_numMerge_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1162  }
1163  auto best = std::min_element(std::begin(cpsInLayerCluster[lcId]),
1164  std::end(cpsInLayerCluster[lcId]),
1165  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1166  auto const& best_cp_linked = cPOnLayer[best->first][lcLayerId].layerClusterIdToEnergyAndScore[lcId];
1167  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer.at(lcLayerId)->Fill(
1168  clusters[lcId].eta(), best_cp_linked.first / clusters[lcId].energy());
1169  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer.at(lcLayerId)->Fill(
1170  clusters[lcId].phi(), best_cp_linked.first / clusters[lcId].energy());
1171  }
1172  histograms.h_denom_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1173  histograms.h_denom_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1174  } // End of loop over LayerClusters
1175 
// Final pass: the opposite direction. For each CaloParticle and layer, compute
// the CP->LC score per linked LayerCluster (normalized by the CP's own
// squared-energy weight) and fill the CP->LC score/shared-energy histograms
// plus the efficiency/duplicate plots using ScoreCutCPtoLC_.
1176  for (const auto& cpId : cPIndices) {
1177  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1178  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
1179  float CPenergy = cPOnLayer[cpId][layerId].energy;
1180  if (CPNumberOfHits == 0)
1181  continue;
1182  int lcWithMaxEnergyInCP = -1;
1183  float maxEnergyLCinCP = 0.f;
1184  float CPEnergyFractionInLC = 0.f;
1185  for (auto& lc : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1186  if (lc.second.first > maxEnergyLCinCP) {
1187  maxEnergyLCinCP = lc.second.first;
1188  lcWithMaxEnergyInCP = lc.first;
1189  }
1190  }
1191  if (CPenergy > 0.f)
1192  CPEnergyFractionInLC = maxEnergyLCinCP / CPenergy;
1193 
1194  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
1195  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
1196  << "CPNhitsOnLayer\t" << std::setw(18) << "lcWithMaxEnergyInCP\t" << std::setw(15)
1197  << "maxEnergyLCinCP\t" << std::setw(20) << "CPEnergyFractionInLC"
1198  << "\n";
1199  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
1200  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
1201  << CPNumberOfHits << "\t" << std::setw(18) << lcWithMaxEnergyInCP << "\t"
1202  << std::setw(15) << maxEnergyLCinCP << "\t" << std::setw(20) << CPEnergyFractionInLC
1203  << "\n";
1204 
1205  // Compute the correct normalization
1206  float invCPEnergyWeight = 0.f;
1207  for (auto const& haf : cPOnLayer[cpId][layerId].hits_and_fractions) {
1208  invCPEnergyWeight +=
1209  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1210  }
1211  invCPEnergyWeight = 1.f / invCPEnergyWeight;
1212 
1213  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
1214  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
1215  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
1216 
1217  bool hitWithNoLC = false;
1218  if (cpFraction == 0.f)
1219  continue; //hopefully this should never happen
1220  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(cp_hitDetId);
1221  if (hit_find_in_LC == detIdToLayerClusterId_Map.end())
1222  hitWithNoLC = true;
1223  auto itcheck = hitMap.find(cp_hitDetId);
1224  const HGCRecHit* hit = itcheck->second;
1225  float hitEnergyWeight = hit->energy() * hit->energy();
1226  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1227  unsigned int layerClusterId = lcPair.first;
1228  float lcFraction = 0.f;
1229 
1230  if (!hitWithNoLC) {
1231  auto findHitIt = std::find(detIdToLayerClusterId_Map[cp_hitDetId].begin(),
1232  detIdToLayerClusterId_Map[cp_hitDetId].end(),
1233  HGVHistoProducerAlgo::detIdInfoInCluster{layerClusterId, 0.f});
1234  if (findHitIt != detIdToLayerClusterId_Map[cp_hitDetId].end())
1235  lcFraction = findHitIt->fraction;
1236  }
1237  // if (lcFraction == 0.) {
1238  // lcFraction = -1.;
1239  // }
1240  lcPair.second.second +=
1241  (lcFraction - cpFraction) * (lcFraction - cpFraction) * hitEnergyWeight * invCPEnergyWeight;
1242  LogDebug("HGCalValidator") << "cpDetId:\t" << (uint32_t)cp_hitDetId << "\tlayerClusterId:\t" << layerClusterId
1243  << "\t"
1244  << "lcfraction,cpfraction:\t" << lcFraction << ", " << cpFraction << "\t"
1245  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
1246  << "current score:\t" << lcPair.second.second << "\t"
1247  << "invCPEnergyWeight:\t" << invCPEnergyWeight << "\n";
1248  } // End of loop over LayerClusters linked to hits of this CaloParticle
1249  } // End of loop over hits of CaloParticle on a Layer
1250 
1251  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
1252  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\tLC id:\t-1 "
1253  << "\t score \t-1"
1254  << "\n";
1255 
1256  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1257  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t LC id: \t" << lcPair.first << "\t score \t"
1258  << lcPair.second.second << "\t"
1259  << "shared energy:\t" << lcPair.second.first << "\t"
1260  << "shared energy fraction:\t" << (lcPair.second.first / CPenergy) << "\n";
1261  histograms.h_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second);
1262  histograms.h_sharedenergy_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.first / CPenergy,
1263  CPenergy);
1264  histograms.h_energy_vs_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second,
1265  lcPair.second.first / CPenergy);
1266  }
// A CP is "associated" on this layer if at least one LC passes the score cut;
// more than one passing LC flags the CP as split/duplicated.
1267  auto assoc = std::count_if(std::begin(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1268  std::end(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1269  [](const auto& obj) { return obj.second.second < ScoreCutCPtoLC_; });
1270  if (assoc) {
1271  histograms.h_num_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1272  histograms.h_num_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1273  if (assoc > 1) {
1274  histograms.h_numDup_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1275  histograms.h_numDup_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1276  }
1277  auto best = std::min_element(
1278  std::begin(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1279  std::end(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1280  [](const auto& obj1, const auto& obj2) { return obj1.second.second < obj2.second.second; });
1281  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer.at(layerId)->Fill(
1282  cP[cpId].g4Tracks()[0].momentum().eta(), best->second.first / CPenergy);
1283  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer.at(layerId)->Fill(
1284  cP[cpId].g4Tracks()[0].momentum().phi(), best->second.first / CPenergy);
1285  }
1286  histograms.h_denom_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1287  histograms.h_denom_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1288  }
1289  }
1290 }
1291 
1293  int count,
1295  const Density& densities,
1296  std::vector<CaloParticle> const& cP,
1297  std::vector<size_t> const& cPIndices,
1298  std::map<DetId, const HGCRecHit*> const& hitMap,
1299  std::map<double, double> cummatbudg,
1300  unsigned layers,
1301  std::vector<int> thicknesses) const {
1302  //Each event to be treated as two events: an event in +ve endcap,
1303  //plus another event in -ve endcap. In this spirit there will be
1304  //a layer variable (layerid) that maps the layers in :
1305  //-z: 0->51
1306  //+z: 52->103
1307 
1308  //To keep track of total num of layer clusters per layer
1309  //tnlcpl[layerid]
1310  std::vector<int> tnlcpl(1000, 0); //tnlcpl.clear(); tnlcpl.reserve(1000);
1311 
1312  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1313  std::map<std::string, int> tnlcpthplus;
1314  tnlcpthplus.clear();
1315  std::map<std::string, int> tnlcpthminus;
1316  tnlcpthminus.clear();
1317  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1318  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1319  tnlcpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1320  tnlcpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1321  }
1322  //To keep track of the total num of clusters with mixed thickness hits per event
1323  tnlcpthplus.insert(std::pair<std::string, int>("mixed", 0));
1324  tnlcpthminus.insert(std::pair<std::string, int>("mixed", 0));
1325 
1326  layerClusters_to_CaloParticles(histograms, clusters, cP, cPIndices, hitMap, layers);
1327 
1328  //To find out the total amount of energy clustered per layer
1329  //Initialize with zeros because I see clear gives weird numbers.
1330  std::vector<double> tecpl(1000, 0.0); //tecpl.clear(); tecpl.reserve(1000);
1331  //for the longitudinal depth barycenter
1332  std::vector<double> ldbar(1000, 0.0); //ldbar.clear(); ldbar.reserve(1000);
1333 
1334  //We need to compare with the total amount of energy coming from caloparticles
1335  double caloparteneplus = 0.;
1336  double caloparteneminus = 0.;
1337  for (const auto& cpId : cPIndices) {
1338  if (cP[cpId].eta() >= 0.) {
1339  caloparteneplus = caloparteneplus + cP[cpId].energy();
1340  }
1341  if (cP[cpId].eta() < 0.) {
1342  caloparteneminus = caloparteneminus + cP[cpId].energy();
1343  }
1344  }
1345 
1346  //loop through clusters of the event
1347  for (unsigned int layerclusterIndex = 0; layerclusterIndex < clusters.size(); layerclusterIndex++) {
1348  const std::vector<std::pair<DetId, float>> hits_and_fractions = clusters[layerclusterIndex].hitsAndFractions();
1349 
1350  const DetId seedid = clusters[layerclusterIndex].seed();
1351  const double seedx = recHitTools_->getPosition(seedid).x();
1352  const double seedy = recHitTools_->getPosition(seedid).y();
1353  DetId maxid = findmaxhit(clusters[layerclusterIndex], hitMap);
1354 
1355  // const DetId maxid = clusters[layerclusterIndex].max();
1356  double maxx = recHitTools_->getPosition(maxid).x();
1357  double maxy = recHitTools_->getPosition(maxid).y();
1358 
1359  //Auxillary variables to count the number of different kind of hits in each cluster
1360  int nthhits120p = 0;
1361  int nthhits200p = 0;
1362  int nthhits300p = 0;
1363  int nthhitsscintp = 0;
1364  int nthhits120m = 0;
1365  int nthhits200m = 0;
1366  int nthhits300m = 0;
1367  int nthhitsscintm = 0;
1368  //For the hits thickness of the layer cluster.
1369  double thickness = 0.;
1370  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
1371  int layerid = 0;
1372  //We will need another layer variable for the longitudinal material budget file reading.
1373  //In this case we need no distinction between -z and +z.
1374  int lay = 0;
1375  //We will need here to save the combination thick_lay
1376  std::string istr = "";
1377  //boolean to check for the layer that the cluster belong to. Maybe later will check all the layer hits.
1378  bool cluslay = true;
1379  //zside that the current cluster belongs to.
1380  int zside = 0;
1381 
1382  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
1383  it_haf != hits_and_fractions.end();
1384  ++it_haf) {
1385  const DetId rh_detid = it_haf->first;
1386  //The layer that the current hit belongs to
1387  layerid = recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
1388  lay = recHitTools_->getLayerWithOffset(rh_detid);
1389  zside = recHitTools_->zside(rh_detid);
1390  if (rh_detid.det() == DetId::Forward || rh_detid.det() == DetId::HGCalEE || rh_detid.det() == DetId::HGCalHSi) {
1391  thickness = recHitTools_->getSiThickness(rh_detid);
1392  } else if (rh_detid.det() == DetId::HGCalHSc) {
1393  thickness = -1;
1394  } else {
1395  LogDebug("HGCalValidator") << "These are HGCal layer clusters, you shouldn't be here !!! " << layerid << "\n";
1396  continue;
1397  }
1398 
1399  //Count here only once the layer cluster and save the combination thick_layerid
1400  std::string curistr = std::to_string((int)thickness);
1401  std::string lay_string = std::to_string(layerid);
1402  while (lay_string.size() < 2)
1403  lay_string.insert(0, "0");
1404  curistr += "_" + lay_string;
1405  if (cluslay) {
1406  tnlcpl[layerid]++;
1407  istr = curistr;
1408  cluslay = false;
1409  }
1410 
1411  if ((thickness == 120.) && (recHitTools_->zside(rh_detid) > 0.)) {
1412  nthhits120p++;
1413  } else if ((thickness == 120.) && (recHitTools_->zside(rh_detid) < 0.)) {
1414  nthhits120m++;
1415  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) > 0.)) {
1416  nthhits200p++;
1417  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) < 0.)) {
1418  nthhits200m++;
1419  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) > 0.)) {
1420  nthhits300p++;
1421  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) < 0.)) {
1422  nthhits300m++;
1423  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) > 0.)) {
1424  nthhitsscintp++;
1425  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) < 0.)) {
1426  nthhitsscintm++;
1427  } else { //assert(0);
1428  LogDebug("HGCalValidator")
1429  << " You are running a geometry that contains thicknesses different than the normal ones. "
1430  << "\n";
1431  }
1432 
1433  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1434  if (itcheck == hitMap.end()) {
1435  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a hit " << rh_detid.rawId() << " "
1436  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1437  continue;
1438  }
1439 
1440  const HGCRecHit* hit = itcheck->second;
1441 
1442  //Here for the per cell plots
1443  //----
1444  const double hit_x = recHitTools_->getPosition(rh_detid).x();
1445  const double hit_y = recHitTools_->getPosition(rh_detid).y();
1446  double distancetoseed = distance(seedx, seedy, hit_x, hit_y);
1447  double distancetomax = distance(maxx, maxy, hit_x, hit_y);
1448  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer.count(curistr)) {
1449  histograms.h_distancetoseedcell_perthickperlayer.at(curistr)->Fill(distancetoseed);
1450  }
1451  //----
1452  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer_eneweighted.count(curistr)) {
1453  histograms.h_distancetoseedcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetoseed, hit->energy());
1454  }
1455  //----
1456  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer.count(curistr)) {
1457  histograms.h_distancetomaxcell_perthickperlayer.at(curistr)->Fill(distancetomax);
1458  }
1459  //----
1460  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer_eneweighted.count(curistr)) {
1461  histograms.h_distancetomaxcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetomax, hit->energy());
1462  }
1463 
1464  //Let's check the density
1465  std::map<DetId, float>::const_iterator dit = densities.find(rh_detid);
1466  if (dit == densities.end()) {
1467  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a density " << rh_detid.rawId() << " "
1468  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1469  continue;
1470  }
1471 
1472  if (histograms.h_cellsenedens_perthick.count((int)thickness)) {
1473  histograms.h_cellsenedens_perthick.at((int)thickness)->Fill(dit->second);
1474  }
1475 
1476  } // end of loop through hits and fractions
1477 
1478  //Check for simultaneously having hits of different kind. Checking at least two combinations is sufficient.
1479  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
1480  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
1481  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
1482  tnlcpthplus["mixed"]++;
1483  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
1484  //This is a cluster with hits of one kind
1485  tnlcpthplus[std::to_string((int)thickness)]++;
1486  }
1487  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
1488  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
1489  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
1490  tnlcpthminus["mixed"]++;
1491  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
1492  //This is a cluster with hits of one kind
1493  tnlcpthminus[std::to_string((int)thickness)]++;
1494  }
1495 
1496  //To find the thickness with the biggest amount of cells
1497  std::vector<int> bigamoth;
1498  bigamoth.clear();
1499  if (zside > 0) {
1500  bigamoth.push_back(nthhits120p);
1501  bigamoth.push_back(nthhits200p);
1502  bigamoth.push_back(nthhits300p);
1503  bigamoth.push_back(nthhitsscintp);
1504  }
1505  if (zside < 0) {
1506  bigamoth.push_back(nthhits120m);
1507  bigamoth.push_back(nthhits200m);
1508  bigamoth.push_back(nthhits300m);
1509  bigamoth.push_back(nthhitsscintm);
1510  }
1511  auto bgth = std::max_element(bigamoth.begin(), bigamoth.end());
1512  istr = std::to_string(thicknesses[std::distance(bigamoth.begin(), bgth)]);
1513  std::string lay_string = std::to_string(layerid);
1514  while (lay_string.size() < 2)
1515  lay_string.insert(0, "0");
1516  istr += "_" + lay_string;
1517 
1518  //Here for the per cluster plots that need the thickness_layer info
1519  if (histograms.h_cellsnum_perthickperlayer.count(istr)) {
1520  histograms.h_cellsnum_perthickperlayer.at(istr)->Fill(hits_and_fractions.size());
1521  }
1522 
1523  //Now, with the distance between seed and max cell.
1524  double distancebetseedandmax = distance(seedx, seedy, maxx, maxy);
1525  //The thickness_layer combination in this case will use the thickness of the seed as a convention.
1526  std::string seedstr = std::to_string((int)recHitTools_->getSiThickness(seedid)) + "_" + std::to_string(layerid);
1527  seedstr += "_" + lay_string;
1528  if (histograms.h_distancebetseedandmaxcell_perthickperlayer.count(seedstr)) {
1529  histograms.h_distancebetseedandmaxcell_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax);
1530  }
1531  if (histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.count(seedstr)) {
1533  distancebetseedandmax, clusters[layerclusterIndex].energy());
1534  }
1535 
1536  //Energy clustered per layer
1537  tecpl[layerid] = tecpl[layerid] + clusters[layerclusterIndex].energy();
1538  ldbar[layerid] = ldbar[layerid] + clusters[layerclusterIndex].energy() * cummatbudg[(double)lay];
1539 
1540  } //end of loop through clusters of the event
1541 
1542  //After the end of the event we can now fill with the results.
1543  //First a couple of variables to keep the sum of the energy of all clusters
1544  double sumeneallcluspl = 0.;
1545  double sumeneallclusmi = 0.;
1546  //And the longitudinal variable
1547  double sumldbarpl = 0.;
1548  double sumldbarmi = 0.;
1549  //Per layer : Loop 0->103
1550  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
1551  if (histograms.h_clusternum_perlayer.count(ilayer)) {
1552  histograms.h_clusternum_perlayer.at(ilayer)->Fill(tnlcpl[ilayer]);
1553  }
1554  // Two times one for plus and one for minus
1555  //First with the -z endcap
1556  if (ilayer < layers) {
1557  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1558  if (caloparteneminus != 0.) {
1559  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneminus);
1560  }
1561  }
1562  //Keep here the total energy for the event in -z
1563  sumeneallclusmi = sumeneallclusmi + tecpl[ilayer];
1564  //And for the longitudinal variable
1565  sumldbarmi = sumldbarmi + ldbar[ilayer];
1566  } else { //Then for the +z
1567  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1568  if (caloparteneplus != 0.) {
1569  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneplus);
1570  }
1571  }
1572  //Keep here the total energy for the event in +z
1573  sumeneallcluspl = sumeneallcluspl + tecpl[ilayer];
1574  //And for the longitudinal variable
1575  sumldbarpl = sumldbarpl + ldbar[ilayer];
1576  } //end of +z loop
1577 
1578  } //end of loop over layers
1579 
1580  //Per thickness
1581  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1582  if (histograms.h_clusternum_perthick.count(*it)) {
1583  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthplus[std::to_string(*it)]);
1584  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthminus[std::to_string(*it)]);
1585  }
1586  }
1587  //Mixed thickness clusters
1588  histograms.h_mixedhitscluster_zplus[count]->Fill(tnlcpthplus["mixed"]);
1589  histograms.h_mixedhitscluster_zminus[count]->Fill(tnlcpthminus["mixed"]);
1590 
1591  //Total energy clustered from all layer clusters (fraction)
1592  if (caloparteneplus != 0.) {
1593  histograms.h_energyclustered_zplus[count]->Fill(100. * sumeneallcluspl / caloparteneplus);
1594  }
1595  if (caloparteneminus != 0.) {
1596  histograms.h_energyclustered_zminus[count]->Fill(100. * sumeneallclusmi / caloparteneminus);
1597  }
1598 
1599  //For the longitudinal depth barycenter
1600  histograms.h_longdepthbarycentre_zplus[count]->Fill(sumldbarpl / sumeneallcluspl);
1601  histograms.h_longdepthbarycentre_zminus[count]->Fill(sumldbarmi / sumeneallclusmi);
1602 }
1603 
1605  int count,
1606  const std::vector<reco::HGCalMultiCluster>& multiClusters,
1607  std::vector<CaloParticle> const& cP,
1608  std::vector<size_t> const& cPIndices,
1609  std::map<DetId, const HGCRecHit*> const& hitMap,
1610  unsigned layers) const {
1611  auto nMultiClusters = multiClusters.size();
1612  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
1613  auto nCaloParticles = cPIndices.size();
1614 
1615  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
1616  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>> detIdToMultiClusterId_Map;
1617  std::vector<int> tracksters_fakemerge(nMultiClusters, 0);
1618  std::vector<int> tracksters_duplicate(nMultiClusters, 0);
1619 
1620  // this contains the ids of the caloparticles contributing with at least one hit to the multi cluster and the reconstruction error
1621  //cpsInMultiCluster[multicluster][CPids]
1622  //Connects a multicluster with all related caloparticles.
1623  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInMultiCluster;
1624  cpsInMultiCluster.resize(nMultiClusters);
1625 
1626  //cPOnLayer[caloparticle][layer]
1627  //This defines a "calo particle on layer" concept. It is only filled in case
1628  //that calo particle has a reconstructed hit related via detid. So, a cPOnLayer[i][j] connects a
1629  //specific calo particle i in layer j with:
1630  //1. the sum of all rechits energy times fraction of the relevant simhit in layer j related to that calo particle i.
1631  //2. the hits and fractions of that calo particle i in layer j.
1632  //3. the layer clusters with matched rechit id.
1633  std::vector<std::vector<caloParticleOnLayer>> cPOnLayer;
1634  cPOnLayer.resize(nCaloParticles);
1635  for (unsigned int i = 0; i < nCaloParticles; ++i) {
1636  cPOnLayer[i].resize(layers * 2);
1637  for (unsigned int j = 0; j < layers * 2; ++j) {
1638  cPOnLayer[i][j].caloParticleId = i;
1639  cPOnLayer[i][j].energy = 0.f;
1640  cPOnLayer[i][j].hits_and_fractions.clear();
1641  }
1642  }
1643 
1644  for (const auto& cpId : cPIndices) {
1645  //take sim clusters
1646  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1647  //loop through sim clusters
1648  for (const auto& it_sc : simClusterRefVector) {
1649  const SimCluster& simCluster = (*(it_sc));
1650  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1651  for (const auto& it_haf : hits_and_fractions) {
1652  DetId hitid = (it_haf.first);
1653  //V9:maps the layers in -z: 0->51 and in +z: 52->103
1654  //V10:maps the layers in -z: 0->49 and in +z: 50->99
1655  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1656  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1657  //Checks whether the current hit belonging to sim cluster has a reconstructed hit.
1658  if (itcheck != hitMap.end()) {
1659  const HGCRecHit* hit = itcheck->second;
1660  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1661  //make a map that will connect a detid with:
1662  //1. the caloparticles that have a simcluster with sim hits in that cell via caloparticle id.
1663  //2. the sum of all simhits fractions that contributes to that detid.
1664  //So, keep in mind that in case of multiple caloparticles contributing in the same cell
1665  //the fraction is the sum over all calo particles. So, something like:
1666  //detid: (caloparticle 1, sum of hits fractions in that detid over all cp) , (caloparticle 2, sum of hits fractions in that detid over all cp), (caloparticle 3, sum of hits fractions in that detid over all cp) ...
1667  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
1668  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
1669  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1670  detIdToCaloParticleId_Map[hitid].emplace_back(
1671  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1672  } else {
1673  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
1674  detIdToCaloParticleId_Map[hitid].end(),
1675  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1676  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
1677  findHitIt->fraction += it_haf.second;
1678  } else {
1679  detIdToCaloParticleId_Map[hitid].emplace_back(
1680  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1681  }
1682  }
1683  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1684  //fill the cPOnLayer[caloparticle][layer] object with energy (sum of all rechits energy times fraction
1685  //of the relevant simhit) and keep the hit (detid and fraction) that contributed.
1686  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
1687  // We need to compress the hits and fractions in order to have a
1688  // reasonable score between CP and LC. Imagine, for example, that a
1689  // CP has detID X used by 2 SimClusters with different fractions. If
1690  // a single LC uses X with fraction 1 and is compared to the 2
1691  // contributions separately, it will be assigned a score != 0, which
1692  // is wrong.
1693  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
1694  auto found = std::find_if(
1695  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
1696  if (found != haf.end()) {
1697  found->second += it_haf.second;
1698  } else {
1699  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
1700  }
1701  }
1702  } // end of loop through simhits
1703  } // end of loop through simclusters
1704  } // end of loop through caloparticles
1705 
1706  //Loop through multiclusters
1707  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1708  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1709 
1710  std::unordered_map<unsigned, float> CPEnergyInMCL;
1711  int maxCPId_byNumberOfHits = -1;
1712  unsigned int maxCPNumberOfHitsInMCL = 0;
1713  int maxCPId_byEnergy = -1;
1714  float maxEnergySharedMCLandCP = 0.f;
1715  float energyFractionOfMCLinCP = 0.f;
1716  float energyFractionOfCPinMCL = 0.f;
1717 
1718  //In case of matched rechit-simhit, so matched
1719  //caloparticle-layercluster-multicluster, count and save the number of
1720  //rechits related to the maximum energy CaloParticle out of all
1721  //CaloParticles related to that layer cluster and multicluster.
1722 
1723  std::unordered_map<unsigned, unsigned> occurrencesCPinMCL;
1724  unsigned int numberOfNoiseHitsInMCL = 0;
1725  unsigned int numberOfHaloHitsInMCL = 0;
1726  unsigned int numberOfHitsInMCL = 0;
1727 
1728  //number of hits related to that cluster.
1729  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1730  numberOfHitsInMCL += numberOfHitsInLC;
1731  std::unordered_map<unsigned, float> CPEnergyInLC;
1732 
1733  //hitsToCaloParticleId is a vector of ints, one for each rechit of the
1734  //layer cluster under study. If negative, there is no simhit from any CaloParticle related.
1735  //If positive, at least one CaloParticle has been found with matched simhit.
1736  //In more detail:
1737  // 1. hitsToCaloParticleId[hitId] = -3
1738  // TN: These represent Halo Cells(N) that have not been
1739  // assigned to any CaloParticle (hence the T).
1740  // 2. hitsToCaloParticleId[hitId] = -2
1741  // FN: These represent Halo Cells(N) that have been assigned
1742  // to a CaloParticle (hence the F, since those should have not been marked as halo)
1743  // 3. hitsToCaloParticleId[hitId] = -1
1744  // FP: These represent Real Cells(P) that have not been
1745  // assigned to any CaloParticle (hence the F, since these are fakes)
1746  // 4. hitsToCaloParticleId[hitId] >= 0
1747  // TP: These represent Real Cells(P) that have been assigned
1748  // to a CaloParticle (hence the T)
1749 
1750  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
1751  //det id of the first hit just to make the lcLayerId variable
1752  //which maps the layers in -z: 0->51 and in +z: 52->103
1753  const auto firstHitDetId = hits_and_fractions[0].first;
1754  int lcLayerId =
1755  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1756 
1757  //Loop though the hits of the layer cluster under study
1758  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
1759  DetId rh_detid = hits_and_fractions[hitId].first;
1760  auto rhFraction = hits_and_fractions[hitId].second;
1761 
1762  //Since the hit is belonging to the layer cluster, it must also be in the rechits map.
1763  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1764  const HGCRecHit* hit = itcheck->second;
1765 
1766  //Make a map that will connect a detid (that belongs to a rechit of the layer cluster under study,
1767  //no need to save others) with:
1768  //1. the layer clusters that have rechits in that detid
1769  //2. the fraction of the rechit of each layer cluster that contributes to that detid.
1770  //So, something like:
1771  //detid: (layer cluster 1, hit fraction) , (layer cluster 2, hit fraction), (layer cluster 3, hit fraction) ...
1772  //here comparing with the calo particle map above the
1773  auto hit_find_in_LC = detIdToMultiClusterId_Map.find(rh_detid);
1774  if (hit_find_in_LC == detIdToMultiClusterId_Map.end()) {
1775  detIdToMultiClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>();
1776  }
1777  detIdToMultiClusterId_Map[rh_detid].emplace_back(
1778  HGVHistoProducerAlgo::detIdInfoInMultiCluster{mclId, mclId, rhFraction});
1779 
1780  //Check whether the rechit of the layer cluster under study has a sim hit in the same cell.
1781  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1782 
1783  // if the fraction is zero or the hit does not belong to any calo
1784  // particle, set the caloparticleId for the hit to -1 this will
1785  // contribute to the number of noise hits
1786 
1787  // MR Remove the case in which the fraction is 0, since this could be a
1788  // real hit that has been marked as halo.
1789  if (rhFraction == 0.) {
1790  hitsToCaloParticleId[hitId] = -2;
1791  numberOfHaloHitsInMCL++;
1792  }
1793  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
1794  hitsToCaloParticleId[hitId] -= 1;
1795  } else {
1796  auto maxCPEnergyInLC = 0.f;
1797  auto maxCPId = -1;
1798  for (auto& h : hit_find_in_CP->second) {
1799  auto shared_fraction = std::min(rhFraction, h.fraction);
1800  //We are in the case where there are calo particles with simhits connected via detid with the rechit under study
1801  //So, from all layers clusters, find the rechits that are connected with a calo particle and save/calculate the
1802  //energy of that calo particle as the sum over all rechits of the rechits energy weighted
1803  //by the caloparticle's fraction related to that rechit.
1804  CPEnergyInMCL[h.clusterId] += shared_fraction * hit->energy();
1805  //Same but for layer clusters for the cell association per layer
1806  CPEnergyInLC[h.clusterId] += shared_fraction * hit->energy();
1807  //Here cPOnLayer[caloparticle][layer] describe above is set.
1808  //Here for multi clusters with matched rechit the CP fraction times hit energy is added and saved .
1809  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].first +=
1810  shared_fraction * hit->energy();
1811  //cpsInMultiCluster[multicluster][CPids]
1812  //Connects a multi cluster with all related caloparticles.
1813  cpsInMultiCluster[mclId].emplace_back(std::make_pair<int, float>(h.clusterId, 0.f));
1814  //From all CaloParticles related to a layer cluster, he saves id and energy of the calo particle
1815  //that after simhit-rechit matching in layer has the maximum energy.
1816  if (shared_fraction > maxCPEnergyInLC) {
1817  //energy is used only here. cpid is saved for multiclusters
1818  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
1819  maxCPId = h.clusterId;
1820  }
1821  }
1822  //Keep in mind here maxCPId could be zero. So, below ask for negative not including zero to count noise.
1823  hitsToCaloParticleId[hitId] = maxCPId;
1824  }
1825 
1826  } //end of loop through rechits of the layer cluster.
1827 
1828  //Loop through all rechits to count how many of them are noise and how many are matched.
1829  //In case of matched rechit-simhit, count and save the number of rechits related to the maximum energy CaloParticle.
1830  for (auto& c : hitsToCaloParticleId) {
1831  if (c < 0) {
1832  numberOfNoiseHitsInMCL++;
1833  } else {
1834  occurrencesCPinMCL[c]++;
1835  }
1836  }
1837 
1838  //Below from all maximum energy CaloParticles, he saves the one with the largest amount
1839  //of related rechits.
1840  for (auto& c : occurrencesCPinMCL) {
1841  if (c.second > maxCPNumberOfHitsInMCL) {
1842  maxCPId_byNumberOfHits = c.first;
1843  maxCPNumberOfHitsInMCL = c.second;
1844  }
1845  }
1846 
1847  //Find the CaloParticle that has the maximum energy shared with the multicluster under study.
1848  for (auto& c : CPEnergyInMCL) {
1849  if (c.second > maxEnergySharedMCLandCP) {
1850  maxCPId_byEnergy = c.first;
1851  maxEnergySharedMCLandCP = c.second;
1852  }
1853  }
1854  //The energy of the CaloParticle that was found to have the maximum energy shared with the multicluster under study.
1855  float totalCPEnergyFromLayerCP = 0.f;
1856  if (maxCPId_byEnergy >= 0) {
1857  //Loop through all layers
1858  for (unsigned int j = 0; j < layers * 2; ++j) {
1859  totalCPEnergyFromLayerCP = totalCPEnergyFromLayerCP + cPOnLayer[maxCPId_byEnergy][j].energy;
1860  }
1861  energyFractionOfCPinMCL = maxEnergySharedMCLandCP / totalCPEnergyFromLayerCP;
1862  if (multiClusters[mclId].energy() > 0.f) {
1863  energyFractionOfMCLinCP = maxEnergySharedMCLandCP / multiClusters[mclId].energy();
1864  }
1865  }
1866 
1867  LogDebug("HGCalValidator") << std::setw(12) << "multiCluster"
1868  << "\t" //LogDebug("HGCalValidator")
1869  << std::setw(10) << "mulcl energy"
1870  << "\t" << std::setw(5) << "nhits"
1871  << "\t" << std::setw(12) << "noise hits"
1872  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
1873  << "\t" << std::setw(8) << "nhitsCP"
1874  << "\t" << std::setw(16) << "maxCPId_byEnergy"
1875  << "\t" << std::setw(23) << "maxEnergySharedMCLandCP"
1876  << "\t" << std::setw(22) << "totalCPEnergyFromAllLayerCP"
1877  << "\t" << std::setw(22) << "energyFractionOfMCLinCP"
1878  << "\t" << std::setw(25) << "energyFractionOfCPinMCL"
1879  << "\t" << std::endl;
1880  LogDebug("HGCalValidator") << std::setw(12) << mclId << "\t" //LogDebug("HGCalValidator")
1881  << std::setw(10) << multiClusters[mclId].energy() << "\t" << std::setw(5)
1882  << numberOfHitsInMCL << "\t" << std::setw(12) << numberOfNoiseHitsInMCL << "\t"
1883  << std::setw(22) << maxCPId_byNumberOfHits << "\t" << std::setw(8)
1884  << maxCPNumberOfHitsInMCL << "\t" << std::setw(16) << maxCPId_byEnergy << "\t"
1885  << std::setw(23) << maxEnergySharedMCLandCP << "\t" << std::setw(22)
1886  << totalCPEnergyFromLayerCP << "\t" << std::setw(22) << energyFractionOfMCLinCP << "\t"
1887  << std::setw(25) << energyFractionOfCPinMCL << std::endl;
1888 
1889  } //end of loop through multi clusters
1890 
1891  //Loop through multiclusters
1892  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1893  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1894 
1895  // find the unique caloparticles id contributing to the multi clusters
1896  //cpsInMultiCluster[multicluster][CPids]
1897  std::sort(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1898  auto last = std::unique(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1899  cpsInMultiCluster[mclId].erase(last, cpsInMultiCluster[mclId].end());
1900 
1901  if (multiClusters[mclId].energy() == 0. && !cpsInMultiCluster[mclId].empty()) {
1902  //Loop through all CaloParticles contributing to multicluster mclId.
1903  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1904  //In case of a multi cluster with zero energy but related CaloParticles the score is set to 1.
1905  cpPair.second = 1.;
1906  // LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId
1907  // << "\t CP id: \t" << cpPair.first
1908  // << "\t score \t" << cpPair.second
1909  // << "\n";
1910  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first << "\t score \t"
1911  << cpPair.second << std::endl;
1912  histograms.h_score_multicl2caloparticle[count]->Fill(cpPair.second);
1913  }
1914  continue;
1915  }
1916 
1917  // Compute the correct normalization
1918  float invMultiClusterEnergyWeight = 0.f;
1919  for (auto const& haf : multiClusters[mclId].hitsAndFractions()) {
1920  invMultiClusterEnergyWeight +=
1921  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1922  }
1923  invMultiClusterEnergyWeight = 1.f / invMultiClusterEnergyWeight;
1924 
1925  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1926  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
1927  DetId rh_detid = hits_and_fractions[i].first;
1928  float rhFraction = hits_and_fractions[i].second;
1929  bool hitWithNoCP = false;
1930 
1931  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1932  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
1933  hitWithNoCP = true;
1934  auto itcheck = hitMap.find(rh_detid);
1935  const HGCRecHit* hit = itcheck->second;
1936  float hitEnergyWeight = hit->energy() * hit->energy();
1937 
1938  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1939  float cpFraction = 0.f;
1940  if (!hitWithNoCP) {
1941  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
1942  detIdToCaloParticleId_Map[rh_detid].end(),
1943  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
1944  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end()) {
1945  cpFraction = findHitIt->fraction;
1946  }
1947  }
1948  cpPair.second +=
1949  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invMultiClusterEnergyWeight;
1950  }
1951  } //end of loop through rechits of layer cluster
1952 
1953  //In case of a multi cluster with some energy but none related CaloParticles print some info.
1954  if (cpsInMultiCluster[mclId].empty())
1955  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\tCP id:\t-1 "
1956  << "\t score \t-1"
1957  << "\n";
1958 
1959  auto score = std::min_element(std::begin(cpsInMultiCluster[mclId]),
1960  std::end(cpsInMultiCluster[mclId]),
1961  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1962  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1963  // LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId
1964  // << "\t CP id: \t" << cpPair.first
1965  // << "\t score \t" << cpPair.second
1966  // << "\n";
1967  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first << "\t score \t"
1968  << cpPair.second << std::endl;
1969  if (cpPair.first == score->first) {
1970  histograms.h_score_multicl2caloparticle[count]->Fill(score->second);
1971  }
1972  float sharedeneCPallLayers = 0.;
1973  //Loop through all layers
1974  for (unsigned int j = 0; j < layers * 2; ++j) {
1975  auto const& cp_linked = cPOnLayer[cpPair.first][j].layerClusterIdToEnergyAndScore[mclId];
1976  sharedeneCPallLayers += cp_linked.first;
1977  } //end of loop through layers
1978  LogDebug("HGCalValidator") << "sharedeneCPallLayers " << sharedeneCPallLayers << std::endl;
1979  if (cpPair.first == score->first) {
1980  histograms.h_sharedenergy_multicl2caloparticle[count]->Fill(sharedeneCPallLayers /
1981  multiClusters[mclId].energy());
1983  score->second, sharedeneCPallLayers / multiClusters[mclId].energy());
1984  }
1985  }
1986 
1987  auto assocFakeMerge = std::count_if(std::begin(cpsInMultiCluster[mclId]),
1988  std::end(cpsInMultiCluster[mclId]),
1989  [](const auto& obj) { return obj.second < ScoreCutMCLtoCPFakeMerge_; });
1990  tracksters_fakemerge[mclId] = assocFakeMerge;
1991 
1992  } //end of loop through multiclusters
1993 
1994  //score3d[cpid][multiclusterid]
1995  std::vector<std::vector<float>> score3d;
1996  score3d.resize(nCaloParticles);
1997  std::vector<std::vector<float>> mclsharedenergy;
1998  mclsharedenergy.resize(nCaloParticles);
1999  std::vector<std::vector<float>> mclsharedenergyfrac;
2000  mclsharedenergyfrac.resize(nCaloParticles);
2001 
2002  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2003  score3d[i].resize(nMultiClusters);
2004  mclsharedenergy[i].resize(nMultiClusters);
2005  mclsharedenergyfrac[i].resize(nMultiClusters);
2006  for (unsigned int j = 0; j < nMultiClusters; ++j) {
2007  score3d[i][j] = 0.f;
2008  mclsharedenergy[i][j] = 0.f;
2009  mclsharedenergyfrac[i][j] = 0.f;
2010  }
2011  }
2012 
2013  //Loop though caloparticles
2014  for (const auto& cpId : cPIndices) {
2015  //We need to keep the multiclusters ids that are related to
2016  //CaloParticle under study for the final filling of the score.
2017  std::vector<unsigned int> cpId_mclId_related;
2018  cpId_mclId_related.clear();
2019 
2020  float CPenergy = 0.f;
2021  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
2022  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
2023  //Below gives the CP energy related to multicluster per layer.
2024  CPenergy += cPOnLayer[cpId][layerId].energy;
2025  if (CPNumberOfHits == 0)
2026  continue;
2027  int mclWithMaxEnergyInCP = -1;
2028  //This is the maximum energy related to multicluster per layer.
2029  float maxEnergyMCLperlayerinCP = 0.f;
2030  float CPEnergyFractionInMCLperlayer = 0.f;
2031  //Remember, and don't be confused by the name: layerClusterIdToEnergyAndScore contains the multicluster id.
2032  for (auto& mcl : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2033  if (mcl.second.first > maxEnergyMCLperlayerinCP) {
2034  maxEnergyMCLperlayerinCP = mcl.second.first;
2035  mclWithMaxEnergyInCP = mcl.first;
2036  }
2037  }
2038  if (CPenergy > 0.f)
2039  CPEnergyFractionInMCLperlayer = maxEnergyMCLperlayerinCP / CPenergy;
2040 
2041  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
2042  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
2043  << "CPNhitsOnLayer\t" << std::setw(18) << "mclWithMaxEnergyInCP\t" << std::setw(15)
2044  << "maxEnergyMCLinCP\t" << std::setw(20) << "CPEnergyFractionInMCL"
2045  << "\n";
2046  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
2047  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
2048  << CPNumberOfHits << "\t" << std::setw(18) << mclWithMaxEnergyInCP << "\t"
2049  << std::setw(15) << maxEnergyMCLperlayerinCP << "\t" << std::setw(20)
2050  << CPEnergyFractionInMCLperlayer << "\n";
2051 
2052  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
2053  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
2054  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
2055 
2056  bool hitWithNoMCL = false;
2057  if (cpFraction == 0.f)
2058  continue; //hopefully this should never happen
2059  auto hit_find_in_MCL = detIdToMultiClusterId_Map.find(cp_hitDetId);
2060  if (hit_find_in_MCL == detIdToMultiClusterId_Map.end())
2061  hitWithNoMCL = true;
2062  auto itcheck = hitMap.find(cp_hitDetId);
2063  const HGCRecHit* hit = itcheck->second;
2064  float hitEnergyWeight = hit->energy() * hit->energy();
2065  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2066  unsigned int multiClusterId = lcPair.first;
2067  if (std::find(std::begin(cpId_mclId_related), std::end(cpId_mclId_related), multiClusterId) ==
2068  std::end(cpId_mclId_related)) {
2069  cpId_mclId_related.push_back(multiClusterId);
2070  }
2071  float mclFraction = 0.f;
2072 
2073  if (!hitWithNoMCL) {
2074  auto findHitIt = std::find(detIdToMultiClusterId_Map[cp_hitDetId].begin(),
2075  detIdToMultiClusterId_Map[cp_hitDetId].end(),
2076  HGVHistoProducerAlgo::detIdInfoInMultiCluster{multiClusterId, 0, 0.f});
2077  if (findHitIt != detIdToMultiClusterId_Map[cp_hitDetId].end())
2078  mclFraction = findHitIt->fraction;
2079  }
2080  // if (mclFraction == 0.) {
2081  // mclFraction = -1.;
2082  // }
2083  //Observe here that we do not divide as before by the layer cluster energy weight. We should sum first
2084  //over all layers and divide with the total CP energy over all layers.
2085  // lcPair.second.second += (mclFraction - cpFraction) * (mclFraction - cpFraction) * hitEnergyWeight;
2086  lcPair.second.second += fabs(mclFraction - cpFraction) * (hit->energy());
2087  LogDebug("HGCalValidator") << "multiClusterId:\t" << multiClusterId << "\t"
2088  << "mclfraction,cpfraction:\t" << mclFraction << ", " << cpFraction << "\t"
2089  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
2090  << "currect score numerator:\t" << lcPair.second.second << "\n";
2091  }
2092  } //end of loop through sim hits of current calo particle
2093 
2094  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
2095  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id:\t-1 "
2096  << "\t layer \t " << layerId << " Sub score in \t -1"
2097  << "\n";
2098 
2099  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2100  //3d score here without the denominator at this point
2101  score3d[cpId][lcPair.first] += lcPair.second.second;
2102  mclsharedenergy[cpId][lcPair.first] += lcPair.second.first;
2103  }
2104  } //end of loop through layers
2105 
2106  // Compute the correct normalization
2107  // We need to loop on the cPOnLayer data structure since this is the
2108  // only one that has the compressed information for multiple usage
2109  // of the same DetId by different SimClusters by a single CaloParticle.
2110  float invCPEnergyWeight = 0.f;
2111  for (const auto& layer : cPOnLayer[cpId]) {
2112  for (const auto& haf : layer.hits_and_fractions) {
2113  invCPEnergyWeight +=
2114  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2115  }
2116  }
2117  invCPEnergyWeight = 1.f / invCPEnergyWeight;
2118 
2119  //Loop through related multiclusters here
2120  //Will switch to vector for access because it is faster
2121  std::vector<int> cpId_mclId_related_vec(cpId_mclId_related.begin(), cpId_mclId_related.end());
2122  for (unsigned int i = 0; i < cpId_mclId_related_vec.size(); ++i) {
2123  auto mclId = cpId_mclId_related_vec[i];
2124  //Now time for the denominator
2125  score3d[cpId][mclId] = score3d[cpId][mclId] * invCPEnergyWeight;
2126  mclsharedenergyfrac[cpId][mclId] = (mclsharedenergy[cpId][mclId] / CPenergy);
2127 
2128  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id: \t" << mclId << "\t score \t" //
2129  << score3d[cpId][mclId] << "\t"
2130  << "invCPEnergyWeight \t" << invCPEnergyWeight << "\t"
2131  << "shared energy:\t" << mclsharedenergy[cpId][mclId] << "\t"
2132  << "shared energy fraction:\t" << mclsharedenergyfrac[cpId][mclId] << "\n";
2133 
2134  histograms.h_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId]);
2135 
2136  histograms.h_sharedenergy_caloparticle2multicl[count]->Fill(mclsharedenergyfrac[cpId][mclId]);
2137  histograms.h_energy_vs_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId],
2138  mclsharedenergyfrac[cpId][mclId]);
2139  } //end of loop through multiclusters
2140 
2141  auto is_assoc = [&](const auto& v) -> bool { return v < ScoreCutCPtoMCLDup_; };
2142 
2143  auto assocDup = std::count_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2144 
2145  if (assocDup > 0) {
2146  histograms.h_num_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2147  histograms.h_num_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2148  auto best = std::min_element(std::begin(score3d[cpId]), std::end(score3d[cpId]));
2149  auto bestmclId = std::distance(std::begin(score3d[cpId]), best);
2150 
2151  histograms.h_sharedenergy_caloparticle2multicl_vs_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta(),
2152  multiClusters[bestmclId].energy() / CPenergy);
2153  histograms.h_sharedenergy_caloparticle2multicl_vs_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi(),
2154  multiClusters[bestmclId].energy() / CPenergy);
2155  }
2156  if (assocDup >= 2) {
2157  auto match = std::find_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2158  while (match != score3d[cpId].end()) {
2159  tracksters_duplicate[std::distance(std::begin(score3d[cpId]), match)] = 1;
2160  match = std::find_if(std::next(match), std::end(score3d[cpId]), is_assoc);
2161  }
2162  }
2163  histograms.h_denom_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2164  histograms.h_denom_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2165 
2166  } //end of loop through caloparticles
2167 
2168  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2169  auto assocFakeMerge = tracksters_fakemerge[mclId];
2170  auto assocDuplicate = tracksters_duplicate[mclId];
2171  if (assocDuplicate) {
2172  histograms.h_numDup_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2173  histograms.h_numDup_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2174  } else {
2175  if (assocFakeMerge > 0) {
2176  histograms.h_num_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2177  histograms.h_num_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2178  auto best = std::min_element(std::begin(cpsInMultiCluster[mclId]),
2179  std::end(cpsInMultiCluster[mclId]),
2180  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2181 
2182  //This is the shared energy taking the best caloparticle in each layer
2183  float sharedeneCPallLayers = 0.;
2184  //Loop through all layers
2185  for (unsigned int j = 0; j < layers * 2; ++j) {
2186  auto const& best_cp_linked = cPOnLayer[best->first][j].layerClusterIdToEnergyAndScore[mclId];
2187  sharedeneCPallLayers += best_cp_linked.first;
2188  } //end of loop through layers
2190  multiClusters[mclId].eta(), sharedeneCPallLayers / multiClusters[mclId].energy());
2192  multiClusters[mclId].phi(), sharedeneCPallLayers / multiClusters[mclId].energy());
2193  }
2194  if (assocFakeMerge >= 2) {
2195  histograms.h_numMerge_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2196  histograms.h_numMerge_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2197  }
2198  }
2199  histograms.h_denom_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2200  histograms.h_denom_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2201  }
2202 }
2203 
2205  int count,
2206  const std::vector<reco::HGCalMultiCluster>& multiClusters,
2207  std::vector<CaloParticle> const& cP,
2208  std::vector<size_t> const& cPIndices,
2209  std::map<DetId, const HGCRecHit*> const& hitMap,
2210  unsigned layers) const {
2211  //Each event to be treated as two events:
2212  //an event in +ve endcap, plus another event in -ve endcap.
2213 
2214  //To keep track of total num of multiclusters
2215  int tnmclmz = 0; //-z
2216  int tnmclpz = 0; //+z
2217  //To count the number of multiclusters with 3 contiguous layers per event.
2218  int tncontmclpz = 0; //+z
2219  int tncontmclmz = 0; //-z
2220  //For the number of multiclusters without 3 contiguous layers per event.
2221  int tnnoncontmclpz = 0; //+z
2222  int tnnoncontmclmz = 0; //-z
2223  //We want to check below the score of cont and non cont multiclusters
2224  std::vector<bool> contmulti;
2225  contmulti.clear();
2226 
2227  //[mclId]-> vector of 2d layer clusters size
2228  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity;
2229  //[mclId]-> [layer][cluster size]
2230  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity_vs_layer;
2231  //We will need for the scale text option
2232  // unsigned int totallcinmcls = 0;
2233  // for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2234  // totallcinmcls = totallcinmcls + multiClusters[mclId].clusters().size();
2235  // }
2236 
2237  auto nMultiClusters = multiClusters.size();
2238  //loop through multiclusters of the event
2239  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2240  const auto layerClusters = multiClusters[mclId].clusters();
2241  auto nLayerClusters = layerClusters.size();
2242  if (multiClusters[mclId].z() < 0.) {
2243  tnmclmz++;
2244  }
2245  if (multiClusters[mclId].z() > 0.) {
2246  tnmclpz++;
2247  }
2248 
2249  //Total number of layer clusters in multicluster
2250  int tnlcinmcl = 0;
2251 
2252  //To keep track of total num of layer clusters per multicluster
2253  //tnlcinmclperlaypz[layerid], tnlcinmclperlaymz[layerid]
2254  std::vector<int> tnlcinmclperlay(1000, 0); //+z
2255 
2256  //For the layers the multicluster expands to. Will use a set because there would be many
2257  //duplicates and then go back to vector for random access, since they say it is faster.
2258  std::set<int> multicluster_layers;
2259 
2260  bool multiclusterInZplus = false;
2261  bool multiclusterInZminus = false;
2262 
2263  //Loop through layer clusters
2264  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
2265  //take the hits and their fraction of the specific layer cluster.
2266  const std::vector<std::pair<DetId, float>>& hits_and_fractions = layerClusters[lcId]->hitsAndFractions();
2267 
2268  //For the multiplicity of the 2d layer clusters in multiclusters
2269  multiplicity[mclId].emplace_back(hits_and_fractions.size());
2270 
2271  const auto firstHitDetId = hits_and_fractions[0].first;
2272  //The layer that the layer cluster belongs to
2273  int layerid = recHitTools_->getLayerWithOffset(firstHitDetId) +
2274  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2275  multicluster_layers.insert(layerid);
2276  multiplicity_vs_layer[mclId].emplace_back(layerid);
2277 
2278  tnlcinmclperlay[layerid]++;
2279  tnlcinmcl++;
2280 
2281  if (recHitTools_->zside(firstHitDetId) > 0.) {
2282  multiclusterInZplus = true;
2283  }
2284  if (recHitTools_->zside(firstHitDetId) < 0.) {
2285  multiclusterInZminus = true;
2286  }
2287 
2288  } //end of loop through layerclusters
2289 
2290  //Per layer : Loop 0->99
2291  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2292  if (histograms.h_clusternum_in_multicluster_perlayer[count].count(ilayer) && tnlcinmclperlay[ilayer] != 0) {
2293  histograms.h_clusternum_in_multicluster_perlayer[count].at(ilayer)->Fill((float)tnlcinmclperlay[ilayer]);
2294  }
2295  //For the profile now of 2d layer cluster in multiclusters vs layer number.
2296  if (tnlcinmclperlay[ilayer] != 0) {
2297  histograms.h_clusternum_in_multicluster_vs_layer[count]->Fill((float)ilayer, (float)tnlcinmclperlay[ilayer]);
2298  }
2299  } //end of loop over layers
2300 
2301  //Looking for multiclusters with 3 contiguous layers per event.
2302  std::vector<int> multicluster_layers_vec(multicluster_layers.begin(), multicluster_layers.end());
2303  //Since we want to also check for non contiguous multiclusters
2304  bool contimulti = false;
2305  //Observe that we start from 1 and go up to size - 1 element.
2306  for (unsigned int i = 1; i < multicluster_layers_vec.size() - 1; ++i) {
2307  if ((multicluster_layers_vec[i - 1] + 1 == multicluster_layers_vec[i]) &&
2308  (multicluster_layers_vec[i + 1] - 1 == multicluster_layers_vec[i])) {
2309  //So, this is a multicluster with 3 contiguous layers per event
2310  if (multiclusterInZplus) {
2311  tncontmclpz++;
2312  }
2313  if (multiclusterInZminus) {
2314  tncontmclmz++;
2315  }
2316  contimulti = true;
2317  break;
2318  }
2319  }
2320  //Count non contiguous multiclusters
2321  if (!contimulti) {
2322  if (multiclusterInZplus) {
2323  tnnoncontmclpz++;
2324  }
2325  if (multiclusterInZminus) {
2326  tnnoncontmclmz++;
2327  }
2328  }
2329 
2330  //Save for the score
2331  contmulti.push_back(contimulti);
2332 
2333  histograms.h_clusternum_in_multicluster[count]->Fill(tnlcinmcl);
2334 
2335  for (unsigned int lc = 0; lc < multiplicity[mclId].size(); ++lc) {
2336  //multiplicity of the current LC
2337  float mlp = std::count(std::begin(multiplicity[mclId]), std::end(multiplicity[mclId]), multiplicity[mclId][lc]);
2338  //LogDebug("HGCalValidator") << "mlp %" << (100. * mlp)/ ((float) nLayerClusters) << std::endl;
2339  // histograms.h_multiplicityOfLCinMCL[count]->Fill( mlp , multiplicity[mclId][lc] , 100. / (float) totallcinmcls );
2340  histograms.h_multiplicityOfLCinMCL[count]->Fill(mlp, multiplicity[mclId][lc]);
2341  //When we will plot with the text option we want the entries to be the same
2342  //as the % of the current cell over the whole number of clusters. For this we need an extra histo.
2343  histograms.h_multiplicity_numberOfEventsHistogram[count]->Fill(mlp);
2344  //For the cluster multiplicity vs layer
2345  //First with the -z endcap (V10:0->49)
2346  if (multiplicity_vs_layer[mclId][lc] < layers) {
2347  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus[count]->Fill(mlp, multiplicity_vs_layer[mclId][lc]);
2349  } else { //Then for the +z (V10:50->99)
2351  mlp, multiplicity_vs_layer[mclId][lc] - layers);
2352  histograms.h_multiplicity_zplus_numberOfEventsHistogram[count]->Fill(mlp);
2353  }
2354  //For the cluster multiplicity vs cluster energy
2355  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy[count]->Fill(mlp, layerClusters[lc]->energy());
2356  }
2357 
2358  histograms.h_multicluster_pt[count]->Fill(multiClusters[mclId].pt());
2359  histograms.h_multicluster_eta[count]->Fill(multiClusters[mclId].eta());
2360  histograms.h_multicluster_phi[count]->Fill(multiClusters[mclId].phi());
2361  histograms.h_multicluster_energy[count]->Fill(multiClusters[mclId].energy());
2362  histograms.h_multicluster_x[count]->Fill(multiClusters[mclId].x());
2363  histograms.h_multicluster_y[count]->Fill(multiClusters[mclId].y());
2364  histograms.h_multicluster_z[count]->Fill(multiClusters[mclId].z());
2365  histograms.h_multicluster_firstlayer[count]->Fill((float)*multicluster_layers.begin());
2366  histograms.h_multicluster_lastlayer[count]->Fill((float)*multicluster_layers.rbegin());
2367  histograms.h_multicluster_layersnum[count]->Fill((float)multicluster_layers.size());
2368 
2369  } //end of loop through multiclusters
2370 
2371  if (tnmclmz != 0) {
2372  histograms.h_multiclusternum[count]->Fill(tnmclmz);
2373  }
2374 
2375  if (tnmclpz != 0) {
2376  histograms.h_multiclusternum[count]->Fill(tnmclpz);
2377  }
2378 
2379  if (tncontmclpz != 0) {
2380  histograms.h_contmulticlusternum[count]->Fill(tncontmclpz);
2381  }
2382 
2383  if (tncontmclmz != 0) {
2384  histograms.h_contmulticlusternum[count]->Fill(tncontmclmz);
2385  }
2386 
2387  if (tnnoncontmclpz != 0) {
2388  histograms.h_noncontmulticlusternum[count]->Fill(tnnoncontmclpz);
2389  }
2390 
2391  if (tnnoncontmclmz != 0) {
2392  histograms.h_noncontmulticlusternum[count]->Fill(tnnoncontmclmz);
2393  }
2394 
2395  multiClusters_to_CaloParticles(histograms, count, multiClusters, cP, cPIndices, hitMap, layers);
2396 }
2397 
2399  const double y1,
2400  const double x2,
2401  const double y2) const { //distance squared
2402  const double dx = x1 - x2;
2403  const double dy = y1 - y2;
2404  return (dx * dx + dy * dy);
2405 } //distance squaredq
2407  const double y1,
2408  const double x2,
2409  const double y2) const { //2-d distance on the layer (x-y)
2410  return std::sqrt(distance2(x1, y1, x2, y2));
2411 }
2412 
2413 void HGVHistoProducerAlgo::setRecHitTools(std::shared_ptr<hgcal::RecHitTools> recHitTools) {
2414  recHitTools_ = recHitTools;
2415 }
2416 
2418  std::map<DetId, const HGCRecHit*> const& hitMap) const {
2419  DetId themaxid;
2420  const std::vector<std::pair<DetId, float>>& hits_and_fractions = cluster.hitsAndFractions();
2421 
2422  double maxene = 0.;
2423  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
2424  it_haf != hits_and_fractions.end();
2425  ++it_haf) {
2426  DetId rh_detid = it_haf->first;
2427 
2428  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2429  const HGCRecHit* hit = itcheck->second;
2430 
2431  if (maxene < hit->energy()) {
2432  maxene = hit->energy();
2433  themaxid = rh_detid;
2434  }
2435  }
2436 
2437  return themaxid;
2438 }
2439 
2440 double HGVHistoProducerAlgo::getEta(double eta) const {
2441  if (useFabsEta_)
2442  return fabs(eta);
2443  else
2444  return eta;
2445 }
#define LogDebug(id)
constexpr float energy() const
Definition: CaloRecHit.h:29
std::vector< dqm::reco::MonitorElement * > h_longdepthbarycentre_zplus
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2multicl_vs_phi
MonitorElement * book1D(TString const &name, TString const &title, int const nchX, double const lowX, double const highX)
Definition: DQMStore.cc:239
std::vector< dqm::reco::MonitorElement * > h_numMerge_multicl_eta
std::vector< dqm::reco::MonitorElement * > h_energyclustered_zminus
FWCore Framework interface EventSetupRecordImplementation h
Helper function to determine trigger accepts.
dqm::reco::MonitorElement * lastLayerEEzm
std::unordered_map< int, dqm::reco::MonitorElement * > h_clusternum_perlayer
std::vector< dqm::reco::MonitorElement * > h_multiplicityOfLCinMCL_vs_layercluster_zplus
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_multicl2caloparticle_vs_phi
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2multicl
std::unordered_map< int, dqm::reco::MonitorElement * > h_denom_caloparticle_eta_perlayer
std::vector< dqm::reco::MonitorElement * > h_energy_vs_score_multicl2caloparticle
std::vector< dqm::reco::MonitorElement * > h_multicluster_x
std::vector< dqm::reco::MonitorElement * > h_multicluster_phi
std::vector< dqm::reco::MonitorElement * > h_denom_multicl_phi
std::unordered_map< int, dqm::reco::MonitorElement * > h_energy_vs_score_layercl2caloparticle_perlayer
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancetomaxcell_perthickperlayer_eneweighted
std::vector< dqm::reco::MonitorElement * > h_multiplicity_zplus_numberOfEventsHistogram
std::vector< std::unordered_map< int, dqm::reco::MonitorElement * > > h_clusternum_in_multicluster_perlayer
void bookClusterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned layers, std::vector< int > thicknesses, std::string pathtomatbudfile)
std::unordered_map< int, dqm::reco::MonitorElement * > h_caloparticle_energy
std::vector< dqm::reco::MonitorElement * > h_num_multicl_eta
dqm::reco::MonitorElement * maxlayerzp
void setRecHitTools(std::shared_ptr< hgcal::RecHitTools > recHitTools)
std::vector< dqm::reco::MonitorElement * > h_multiplicity_numberOfEventsHistogram
std::unordered_map< int, dqm::reco::MonitorElement * > h_denom_layercl_eta_perlayer
std::vector< dqm::reco::MonitorElement * > h_denom_caloparticle_phi
std::vector< dqm::reco::MonitorElement * > h_mixedhitscluster_zplus
constexpr uint32_t rawId() const
get the raw id
Definition: DetId.h:57
std::unordered_map< int, dqm::reco::MonitorElement * > h_num_caloparticle_eta_perlayer
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_layercl2caloparticle_perlayer
std::vector< dqm::reco::MonitorElement * > h_multiplicity_zminus_numberOfEventsHistogram
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_layercl2caloparticle_vs_eta_perlayer
std::unordered_map< int, dqm::reco::MonitorElement * > h_energy_vs_score_caloparticle2layercl_perlayer
int zside(DetId const &)
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2layercl_vs_phi_perlayer
std::unordered_map< int, dqm::reco::MonitorElement * > h_caloparticle_pt
const std::vector< SimTrack > & g4Tracks() const
Definition: CaloParticle.h:74
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_multicl2caloparticle_vs_eta
float eta() const
Momentum pseudorapidity. Note this is taken from the simtrack before the calorimeter.
Definition: CaloParticle.h:142
std::unordered_map< int, dqm::reco::MonitorElement * > h_numDup_caloparticle_eta_perlayer
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
std::unordered_map< int, dqm::reco::MonitorElement * > h_cellsenedens_perthick
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_layercl2caloparticle_vs_phi_perlayer
void fill_info_histos(const Histograms &histograms, unsigned layers) const
void bookMultiClusterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned layers)
std::vector< dqm::reco::MonitorElement * > h_multiplicityOfLCinMCL_vs_layerclusterenergy
std::vector< dqm::reco::MonitorElement * > h_numDup_multicl_eta
std::vector< dqm::reco::MonitorElement * > h_cluster_eta
void bookInfo(DQMStore::IBooker &ibook, Histograms &histograms)
double eta() const
pseudorapidity of cluster centroid
Definition: CaloCluster.h:180
std::vector< dqm::reco::MonitorElement * > h_energyclustered_zplus
void layerClusters_to_CaloParticles(const Histograms &histograms, const reco::CaloClusterCollection &clusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
void fill_multi_cluster_histos(const Histograms &histograms, int count, const std::vector< reco::HGCalMultiCluster > &multiClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_cellsnum_perthickperlayer
std::vector< dqm::reco::MonitorElement * > h_multiplicityOfLCinMCL_vs_layercluster_zminus
void Fill(long long x)
std::vector< dqm::reco::MonitorElement * > h_multicluster_firstlayer
double distance(const double x1, const double y1, const double x2, const double y2) const
std::vector< dqm::reco::MonitorElement * > h_denom_multicl_eta
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancebetseedandmaxcell_perthickperlayer
hgcal_clustering::Density Density
Definition: HGCalCLUEAlgo.h:27
const std::vector< std::pair< DetId, float > > & hitsAndFractions() const
Definition: CaloCluster.h:209
std::vector< dqm::reco::MonitorElement * > h_longdepthbarycentre_zminus
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer
void fill_caloparticle_histos(const Histograms &histograms, int pdgid, const CaloParticle &caloparticle, std::vector< SimVertex > const &simVertices) const
std::unordered_map< int, dqm::reco::MonitorElement * > h_energyclustered_perlayer
std::vector< dqm::reco::MonitorElement * > h_mixedhitscluster_zminus
const double ScoreCutCPtoMCLDup_
Monte Carlo truth information used for tracking validation.
Definition: SimCluster.h:29
std::vector< dqm::reco::MonitorElement * > h_contmulticlusternum
std::unordered_map< int, dqm::reco::MonitorElement * > h_clusternum_perthick
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2layercl_perlayer
T sqrt(T t)
Definition: SSEVec.h:19
def unique(seq, keepstr=True)
Definition: tier0.py:24
MonitorElement * bookProfile(TString const &name, TString const &title, int nchX, double lowX, double highX, int nchY, double lowY, double highY, char const *option="s")
Definition: DQMStore.cc:333
void bookCaloParticleHistos(DQMStore::IBooker &ibook, Histograms &histograms, int pdgid)
std::unordered_map< int, dqm::reco::MonitorElement * > h_caloparticle_phi
float energy() const
Energy. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:98
std::vector< dqm::reco::MonitorElement * > h_multiplicityOfLCinMCL
dqm::reco::MonitorElement * maxlayerzm
std::vector< CaloCluster > CaloClusterCollection
collection of CaloCluster objects
double f[11][100]
std::vector< dqm::reco::MonitorElement * > h_num_caloparticle_phi
double getEta(double eta) const
#define end
Definition: vmac.h:39
T min(T a, T b)
Definition: MathUtil.h:58
dqm::reco::MonitorElement * lastLayerEEzp
std::vector< dqm::reco::MonitorElement * > h_multicluster_z
const double ScoreCutLCtoCP_
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancetomaxcell_perthickperlayer
DetId findmaxhit(const reco::CaloCluster &cluster, std::map< DetId, const HGCRecHit * > const &) const
std::vector< dqm::reco::MonitorElement * > h_clusternum_in_multicluster
std::unordered_map< int, dqm::reco::MonitorElement * > h_num_layercl_eta_perlayer
std::vector< dqm::reco::MonitorElement * > h_numMerge_multicl_phi
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2multicl_vs_eta
double distance2(const double x1, const double y1, const double x2, const double y2) const
void multiClusters_to_CaloParticles(const Histograms &histograms, int count, const std::vector< reco::HGCalMultiCluster > &multiClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
std::vector< dqm::reco::MonitorElement * > h_multicluster_y
std::vector< dqm::reco::MonitorElement * > h_clusternum_in_multicluster_vs_layer
Definition: DetId.h:17
const double ScoreCutMCLtoCPFakeMerge_
std::unordered_map< int, dqm::reco::MonitorElement * > h_numDup_caloparticle_phi_perlayer
std::vector< dqm::reco::MonitorElement * > h_denom_caloparticle_eta
std::unordered_map< int, dqm::reco::MonitorElement * > h_num_layercl_phi_perlayer
std::shared_ptr< hgcal::RecHitTools > recHitTools_
std::vector< dqm::reco::MonitorElement * > h_sharedenergy_multicl2caloparticle
std::unordered_map< int, dqm::reco::MonitorElement * > h_num_caloparticle_phi_perlayer
std::vector< dqm::reco::MonitorElement * > h_multiclusternum
std::unordered_map< int, dqm::reco::MonitorElement * > h_caloparticle_eta_Zorigin
void fill_cluster_histos(const Histograms &histograms, int count, const reco::CaloCluster &cluster) const
float pt() const
Transverse momentum. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:130
dqm::reco::MonitorElement * lastLayerFHzm
void fill_generic_cluster_histos(const Histograms &histograms, int count, const reco::CaloClusterCollection &clusters, const Density &densities, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::map< DetId, const HGCRecHit * > const &, std::map< double, double > cummatbudg, unsigned layers, std::vector< int > thicknesses) const
dqm::reco::MonitorElement * lastLayerFHzp
std::vector< dqm::reco::MonitorElement * > h_multicluster_layersnum
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancetoseedcell_perthickperlayer_eneweighted
float phi() const
Momentum azimuthal angle. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:134
std::unordered_map< int, dqm::reco::MonitorElement * > h_denom_caloparticle_phi_perlayer
std::vector< dqm::reco::MonitorElement * > h_score_multicl2caloparticle
std::unordered_map< int, dqm::reco::MonitorElement * > h_sharedenergy_caloparticle2layercl_vs_eta_perlayer
#define begin
Definition: vmac.h:32
std::vector< dqm::reco::MonitorElement * > h_num_multicl_phi
MonitorElement * book2D(TString const &name, TString const &title, int nchX, double lowX, double highX, int nchY, double lowY, double highY)
Definition: DQMStore.cc:266
static int position[264][3]
Definition: ReadPGInfo.cc:289
std::vector< dqm::reco::MonitorElement * > h_noncontmulticlusternum
std::vector< dqm::reco::MonitorElement * > h_numDup_multicl_phi
std::vector< dqm::reco::MonitorElement * > h_score_caloparticle2multicl
std::pair< typename Association::data_type::first_type, double > match(Reference key, Association association, bool bestMatchByMaxValue)
Generic matching function.
Definition: Utils.h:10
std::unordered_map< int, dqm::reco::MonitorElement * > h_cellAssociation_perlayer
std::vector< dqm::reco::MonitorElement * > h_multicluster_energy
std::unordered_map< std::string, dqm::reco::MonitorElement * > h_distancetoseedcell_perthickperlayer
std::unordered_map< int, dqm::reco::MonitorElement * > h_denom_layercl_phi_perlayer
std::unordered_map< int, dqm::reco::MonitorElement * > h_caloparticle_eta
std::vector< dqm::reco::MonitorElement * > h_multicluster_eta
std::vector< dqm::reco::MonitorElement * > h_multicluster_pt
const double ScoreCutCPtoLC_
std::unordered_map< int, dqm::reco::MonitorElement * > h_score_layercl2caloparticle_perlayer
std::vector< dqm::reco::MonitorElement * > h_energy_vs_score_caloparticle2multicl
std::unordered_map< int, dqm::reco::MonitorElement * > h_score_caloparticle2layercl_perlayer
MonitorElement * bookInt(TString const &name)
Definition: DQMStore.cc:231
def move(src, dest)
Definition: eostools.py:511
HGVHistoProducerAlgo(const edm::ParameterSet &pset)
std::unordered_map< int, dqm::reco::MonitorElement * > h_numMerge_layercl_eta_perlayer
std::vector< std::pair< uint32_t, float > > hits_and_fractions() const
Returns list of rechit IDs and fractions for this SimCluster.
Definition: SimCluster.h:181
std::unordered_map< int, dqm::reco::MonitorElement * > h_numMerge_layercl_phi_perlayer
std::vector< dqm::reco::MonitorElement * > h_num_caloparticle_eta
constexpr Detector det() const
get the detector field from this detid
Definition: DetId.h:46
std::vector< dqm::reco::MonitorElement * > h_multicluster_lastlayer