CMS 3D CMS Logo

HGVHistoProducerAlgo.cc
Go to the documentation of this file.
1 #include <numeric>
2 #include <iomanip>
3 
7 #include "TMath.h"
8 #include "TLatex.h"
9 #include "TF1.h"
10 
11 using namespace std;
12 
//Parameters for the score cut. Later, this will become part of the
//configuration parameter for the HGCAL associator.
//constexpr: compile-time constants with internal linkage (file-local).
//Association score cut, LayerCluster -> CaloParticle direction.
constexpr double ScoreCutLCtoCP_ = 0.1;
//Association score cut, CaloParticle -> LayerCluster direction.
constexpr double ScoreCutCPtoLC_ = 0.1;
//Score cut used when classifying MultiCluster -> CaloParticle matches
//as fake/merged — TODO confirm exact usage against the fill code.
constexpr double ScoreCutMCLtoCPFakeMerge_ = 0.6;
//Score cut used when classifying CaloParticle -> MultiCluster matches
//as duplicates — TODO confirm exact usage against the fill code.
constexpr double ScoreCutCPtoMCLDup_ = 0.2;
19 
21  : //parameters for eta
22  minEta_(pset.getParameter<double>("minEta")),
23  maxEta_(pset.getParameter<double>("maxEta")),
24  nintEta_(pset.getParameter<int>("nintEta")),
25  useFabsEta_(pset.getParameter<bool>("useFabsEta")),
26 
27  //parameters for energy
28  minEne_(pset.getParameter<double>("minEne")),
29  maxEne_(pset.getParameter<double>("maxEne")),
30  nintEne_(pset.getParameter<int>("nintEne")),
31 
32  //parameters for pt
33  minPt_(pset.getParameter<double>("minPt")),
34  maxPt_(pset.getParameter<double>("maxPt")),
35  nintPt_(pset.getParameter<int>("nintPt")),
36 
37  //parameters for phi
38  minPhi_(pset.getParameter<double>("minPhi")),
39  maxPhi_(pset.getParameter<double>("maxPhi")),
40  nintPhi_(pset.getParameter<int>("nintPhi")),
41 
42  //parameters for counting mixed hits clusters
43  minMixedHitsCluster_(pset.getParameter<double>("minMixedHitsCluster")),
44  maxMixedHitsCluster_(pset.getParameter<double>("maxMixedHitsCluster")),
45  nintMixedHitsCluster_(pset.getParameter<int>("nintMixedHitsCluster")),
46 
47  //parameters for the total amount of energy clustered by all layer clusters (fraction over caloparticles)
48  minEneCl_(pset.getParameter<double>("minEneCl")),
49  maxEneCl_(pset.getParameter<double>("maxEneCl")),
50  nintEneCl_(pset.getParameter<int>("nintEneCl")),
51 
52  //parameters for the longitudinal depth barycenter.
53  minLongDepBary_(pset.getParameter<double>("minLongDepBary")),
54  maxLongDepBary_(pset.getParameter<double>("maxLongDepBary")),
55  nintLongDepBary_(pset.getParameter<int>("nintLongDepBary")),
56 
57  //parameters for z positionof vertex plots
58  minZpos_(pset.getParameter<double>("minZpos")),
59  maxZpos_(pset.getParameter<double>("maxZpos")),
60  nintZpos_(pset.getParameter<int>("nintZpos")),
61 
62  //Parameters for the total number of layer clusters per layer
63  minTotNClsperlay_(pset.getParameter<double>("minTotNClsperlay")),
64  maxTotNClsperlay_(pset.getParameter<double>("maxTotNClsperlay")),
65  nintTotNClsperlay_(pset.getParameter<int>("nintTotNClsperlay")),
66 
67  //Parameters for the energy clustered by layer clusters per layer (fraction over caloparticles)
68  minEneClperlay_(pset.getParameter<double>("minEneClperlay")),
69  maxEneClperlay_(pset.getParameter<double>("maxEneClperlay")),
70  nintEneClperlay_(pset.getParameter<int>("nintEneClperlay")),
71 
72  //Parameters for the score both for:
73  //1. calo particle to layer clusters association per layer
74  //2. layer cluster to calo particles association per layer
75  minScore_(pset.getParameter<double>("minScore")),
76  maxScore_(pset.getParameter<double>("maxScore")),
77  nintScore_(pset.getParameter<int>("nintScore")),
78 
79  //Parameters for shared energy fraction. That is:
80  //1. Fraction of each of the layer clusters energy related to a
81  //calo particle over that calo particle's energy.
82  //2. Fraction of each of the calo particles energy
83  //related to a layer cluster over that layer cluster's energy.
84  minSharedEneFrac_(pset.getParameter<double>("minSharedEneFrac")),
85  maxSharedEneFrac_(pset.getParameter<double>("maxSharedEneFrac")),
86  nintSharedEneFrac_(pset.getParameter<int>("nintSharedEneFrac")),
87 
88  //Same as above for multiclusters
89  minMCLSharedEneFrac_(pset.getParameter<double>("minMCLSharedEneFrac")),
90  maxMCLSharedEneFrac_(pset.getParameter<double>("maxMCLSharedEneFrac")),
91  nintMCLSharedEneFrac_(pset.getParameter<int>("nintMCLSharedEneFrac")),
92 
93  //Parameters for the total number of layer clusters per thickness
94  minTotNClsperthick_(pset.getParameter<double>("minTotNClsperthick")),
95  maxTotNClsperthick_(pset.getParameter<double>("maxTotNClsperthick")),
96  nintTotNClsperthick_(pset.getParameter<int>("nintTotNClsperthick")),
97 
98  //Parameters for the total number of cells per per thickness per layer
99  minTotNcellsperthickperlayer_(pset.getParameter<double>("minTotNcellsperthickperlayer")),
100  maxTotNcellsperthickperlayer_(pset.getParameter<double>("maxTotNcellsperthickperlayer")),
101  nintTotNcellsperthickperlayer_(pset.getParameter<int>("nintTotNcellsperthickperlayer")),
102 
103  //Parameters for the distance of cluster cells to seed cell per thickness per layer
104  minDisToSeedperthickperlayer_(pset.getParameter<double>("minDisToSeedperthickperlayer")),
105  maxDisToSeedperthickperlayer_(pset.getParameter<double>("maxDisToSeedperthickperlayer")),
106  nintDisToSeedperthickperlayer_(pset.getParameter<int>("nintDisToSeedperthickperlayer")),
107 
108  //Parameters for the energy weighted distance of cluster cells to seed cell per thickness per layer
109  minDisToSeedperthickperlayerenewei_(pset.getParameter<double>("minDisToSeedperthickperlayerenewei")),
110  maxDisToSeedperthickperlayerenewei_(pset.getParameter<double>("maxDisToSeedperthickperlayerenewei")),
111  nintDisToSeedperthickperlayerenewei_(pset.getParameter<int>("nintDisToSeedperthickperlayerenewei")),
112 
113  //Parameters for the distance of cluster cells to max cell per thickness per layer
114  minDisToMaxperthickperlayer_(pset.getParameter<double>("minDisToMaxperthickperlayer")),
115  maxDisToMaxperthickperlayer_(pset.getParameter<double>("maxDisToMaxperthickperlayer")),
116  nintDisToMaxperthickperlayer_(pset.getParameter<int>("nintDisToMaxperthickperlayer")),
117 
118  //Parameters for the energy weighted distance of cluster cells to max cell per thickness per layer
119  minDisToMaxperthickperlayerenewei_(pset.getParameter<double>("minDisToMaxperthickperlayerenewei")),
120  maxDisToMaxperthickperlayerenewei_(pset.getParameter<double>("maxDisToMaxperthickperlayerenewei")),
121  nintDisToMaxperthickperlayerenewei_(pset.getParameter<int>("nintDisToMaxperthickperlayerenewei")),
122 
123  //Parameters for the distance of seed cell to max cell per thickness per layer
124  minDisSeedToMaxperthickperlayer_(pset.getParameter<double>("minDisSeedToMaxperthickperlayer")),
125  maxDisSeedToMaxperthickperlayer_(pset.getParameter<double>("maxDisSeedToMaxperthickperlayer")),
126  nintDisSeedToMaxperthickperlayer_(pset.getParameter<int>("nintDisSeedToMaxperthickperlayer")),
127 
128  //Parameters for the energy of a cluster per thickness per layer
129  minClEneperthickperlayer_(pset.getParameter<double>("minClEneperthickperlayer")),
130  maxClEneperthickperlayer_(pset.getParameter<double>("maxClEneperthickperlayer")),
131  nintClEneperthickperlayer_(pset.getParameter<int>("nintClEneperthickperlayer")),
132 
133  //Parameters for the energy density of cluster cells per thickness
134  minCellsEneDensperthick_(pset.getParameter<double>("minCellsEneDensperthick")),
135  maxCellsEneDensperthick_(pset.getParameter<double>("maxCellsEneDensperthick")),
136  nintCellsEneDensperthick_(pset.getParameter<int>("nintCellsEneDensperthick")),
137 
138  //Parameters for the total number of multiclusters per event
139  //We always treet one event as two events, one in +z one in -z
140  minTotNMCLs_(pset.getParameter<double>("minTotNMCLs")),
141  maxTotNMCLs_(pset.getParameter<double>("maxTotNMCLs")),
142  nintTotNMCLs_(pset.getParameter<int>("nintTotNMCLs")),
143 
144  //Parameters for the total number of layer clusters in multicluster
145  minTotNClsinMCLs_(pset.getParameter<double>("minTotNClsinMCLs")),
146  maxTotNClsinMCLs_(pset.getParameter<double>("maxTotNClsinMCLs")),
147  nintTotNClsinMCLs_(pset.getParameter<int>("nintTotNClsinMCLs")),
148 
149  //Parameters for the total number of layer clusters in multicluster per layer
150  minTotNClsinMCLsperlayer_(pset.getParameter<double>("minTotNClsinMCLsperlayer")),
151  maxTotNClsinMCLsperlayer_(pset.getParameter<double>("maxTotNClsinMCLsperlayer")),
152  nintTotNClsinMCLsperlayer_(pset.getParameter<int>("nintTotNClsinMCLsperlayer")),
153 
154  //Parameters for the multiplicity of layer clusters in multicluster
155  minMplofLCs_(pset.getParameter<double>("minMplofLCs")),
156  maxMplofLCs_(pset.getParameter<double>("maxMplofLCs")),
157  nintMplofLCs_(pset.getParameter<int>("nintMplofLCs")),
158 
159  //Parameters for cluster size
160  minSizeCLsinMCLs_(pset.getParameter<double>("minSizeCLsinMCLs")),
161  maxSizeCLsinMCLs_(pset.getParameter<double>("maxSizeCLsinMCLs")),
162  nintSizeCLsinMCLs_(pset.getParameter<int>("nintSizeCLsinMCLs")),
163 
164  //Parameters for the energy of a cluster per thickness per layer
165  minClEnepermultiplicity_(pset.getParameter<double>("minClEnepermultiplicity")),
166  maxClEnepermultiplicity_(pset.getParameter<double>("maxClEnepermultiplicity")),
167  nintClEnepermultiplicity_(pset.getParameter<int>("nintClEnepermultiplicity")),
168 
169  //parameters for x
170  minX_(pset.getParameter<double>("minX")),
171  maxX_(pset.getParameter<double>("maxX")),
172  nintX_(pset.getParameter<int>("nintX")),
173 
174  //parameters for y
175  minY_(pset.getParameter<double>("minY")),
176  maxY_(pset.getParameter<double>("maxY")),
177  nintY_(pset.getParameter<int>("nintY")),
178 
179  //parameters for z
180  minZ_(pset.getParameter<double>("minZ")),
181  maxZ_(pset.getParameter<double>("maxZ")),
182  nintZ_(pset.getParameter<int>("nintZ")) {}
183 
185 
187  histograms.lastLayerEEzm = ibook.bookInt("lastLayerEEzm");
188  histograms.lastLayerFHzm = ibook.bookInt("lastLayerFHzm");
189  histograms.maxlayerzm = ibook.bookInt("maxlayerzm");
190  histograms.lastLayerEEzp = ibook.bookInt("lastLayerEEzp");
191  histograms.lastLayerFHzp = ibook.bookInt("lastLayerFHzp");
192  histograms.maxlayerzp = ibook.bookInt("maxlayerzp");
193 }
194 
196  histograms.h_caloparticle_eta[pdgid] =
197  ibook.book1D("num_caloparticle_eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_);
198  histograms.h_caloparticle_eta_Zorigin[pdgid] =
199  ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_);
200 
201  histograms.h_caloparticle_energy[pdgid] =
202  ibook.book1D("caloparticle_energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_);
203  histograms.h_caloparticle_pt[pdgid] = ibook.book1D("caloparticle_pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_);
204  histograms.h_caloparticle_phi[pdgid] =
205  ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_);
206 }
207 
210  unsigned layers,
211  std::vector<int> thicknesses,
212  std::string pathtomatbudfile) {
213  //---------------------------------------------------------------------------------------------------------------------------
214  histograms.h_cluster_eta.push_back(
215  ibook.book1D("num_reco_cluster_eta", "N of reco clusters vs eta", nintEta_, minEta_, maxEta_));
216 
217  //---------------------------------------------------------------------------------------------------------------------------
218  //z-
219  histograms.h_mixedhitscluster_zminus.push_back(
220  ibook.book1D("mixedhitscluster_zminus",
221  "N of reco clusters that contain hits of more than one kind in z-",
225  //z+
226  histograms.h_mixedhitscluster_zplus.push_back(
227  ibook.book1D("mixedhitscluster_zplus",
228  "N of reco clusters that contain hits of more than one kind in z+",
232 
233  //---------------------------------------------------------------------------------------------------------------------------
234  //z-
235  histograms.h_energyclustered_zminus.push_back(
236  ibook.book1D("energyclustered_zminus",
237  "percent of total energy clustered by all layer clusters over caloparticles energy in z-",
238  nintEneCl_,
239  minEneCl_,
240  maxEneCl_));
241  //z+
242  histograms.h_energyclustered_zplus.push_back(
243  ibook.book1D("energyclustered_zplus",
244  "percent of total energy clustered by all layer clusters over caloparticles energy in z+",
245  nintEneCl_,
246  minEneCl_,
247  maxEneCl_));
248 
249  //---------------------------------------------------------------------------------------------------------------------------
250  //z-
251  std::string subpathtomat = pathtomatbudfile.substr(pathtomatbudfile.find("Validation"));
252  histograms.h_longdepthbarycentre_zminus.push_back(
253  ibook.book1D("longdepthbarycentre_zminus",
254  "The longitudinal depth barycentre in z- for " + subpathtomat,
257  maxLongDepBary_));
258  //z+
259  histograms.h_longdepthbarycentre_zplus.push_back(
260  ibook.book1D("longdepthbarycentre_zplus",
261  "The longitudinal depth barycentre in z+ for " + subpathtomat,
264  maxLongDepBary_));
265 
266  //---------------------------------------------------------------------------------------------------------------------------
267  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
268  auto istr1 = std::to_string(ilayer);
269  while (istr1.size() < 2) {
270  istr1.insert(0, "0");
271  }
272  //We will make a mapping to the regural layer naming plus z- or z+ for convenience
273  std::string istr2 = "";
274  //First with the -z endcap
275  if (ilayer < layers) {
276  istr2 = std::to_string(ilayer + 1) + " in z-";
277  } else { //Then for the +z
278  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
279  }
280  histograms.h_clusternum_perlayer[ilayer] = ibook.book1D("totclusternum_layer_" + istr1,
281  "total number of layer clusters for layer " + istr2,
285  histograms.h_energyclustered_perlayer[ilayer] =
286  ibook.book1D("energyclustered_perlayer" + istr1,
287  "percent of total energy clustered by layer clusters over caloparticles energy for layer " + istr2,
291  histograms.h_score_layercl2caloparticle_perlayer[ilayer] =
292  ibook.book1D("Score_layercl2caloparticle_perlayer" + istr1,
293  "Score of Layer Cluster per CaloParticle for layer " + istr2,
294  nintScore_,
295  minScore_,
296  maxScore_);
297  histograms.h_score_caloparticle2layercl_perlayer[ilayer] =
298  ibook.book1D("Score_caloparticle2layercl_perlayer" + istr1,
299  "Score of CaloParticle per Layer Cluster for layer " + istr2,
300  nintScore_,
301  minScore_,
302  maxScore_);
303  histograms.h_energy_vs_score_caloparticle2layercl_perlayer[ilayer] =
304  ibook.book2D("Energy_vs_Score_caloparticle2layer_perlayer" + istr1,
305  "Energy vs Score of CaloParticle per Layer Cluster for layer " + istr2,
306  nintScore_,
307  minScore_,
308  maxScore_,
312  histograms.h_energy_vs_score_layercl2caloparticle_perlayer[ilayer] =
313  ibook.book2D("Energy_vs_Score_layer2caloparticle_perlayer" + istr1,
314  "Energy vs Score of Layer Cluster per CaloParticle Layer for layer " + istr2,
315  nintScore_,
316  minScore_,
317  maxScore_,
321  histograms.h_sharedenergy_caloparticle2layercl_perlayer[ilayer] =
322  ibook.book1D("SharedEnergy_caloparticle2layercl_perlayer" + istr1,
323  "Shared Energy of CaloParticle per Layer Cluster for layer " + istr2,
327  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer[ilayer] =
328  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_eta_perlayer" + istr1,
329  "Shared Energy of CaloParticle vs #eta per best Layer Cluster for layer " + istr2,
330  nintEta_,
331  minEta_,
332  maxEta_,
335  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer[ilayer] =
336  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_phi_perlayer" + istr1,
337  "Shared Energy of CaloParticle vs #phi per best Layer Cluster for layer " + istr2,
338  nintPhi_,
339  minPhi_,
340  maxPhi_,
343  histograms.h_sharedenergy_layercl2caloparticle_perlayer[ilayer] =
344  ibook.book1D("SharedEnergy_layercluster2caloparticle_perlayer" + istr1,
345  "Shared Energy of Layer Cluster per Layer Calo Particle for layer " + istr2,
349  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer[ilayer] =
350  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_eta_perlayer" + istr1,
351  "Shared Energy of LayerCluster vs #eta per best Calo Particle for layer " + istr2,
352  nintEta_,
353  minEta_,
354  maxEta_,
357  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer[ilayer] =
358  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_phi_perlayer" + istr1,
359  "Shared Energy of LayerCluster vs #phi per best Calo Particle for layer " + istr2,
360  nintPhi_,
361  minPhi_,
362  maxPhi_,
365  histograms.h_num_caloparticle_eta_perlayer[ilayer] =
366  ibook.book1D("Num_CaloParticle_Eta_perlayer" + istr1,
367  "Num CaloParticle Eta per Layer Cluster for layer " + istr2,
368  nintEta_,
369  minEta_,
370  maxEta_);
371  histograms.h_numDup_caloparticle_eta_perlayer[ilayer] =
372  ibook.book1D("NumDup_CaloParticle_Eta_perlayer" + istr1,
373  "Num Duplicate CaloParticle Eta per Layer Cluster for layer " + istr2,
374  nintEta_,
375  minEta_,
376  maxEta_);
377  histograms.h_denom_caloparticle_eta_perlayer[ilayer] =
378  ibook.book1D("Denom_CaloParticle_Eta_perlayer" + istr1,
379  "Denom CaloParticle Eta per Layer Cluster for layer " + istr2,
380  nintEta_,
381  minEta_,
382  maxEta_);
383  histograms.h_num_caloparticle_phi_perlayer[ilayer] =
384  ibook.book1D("Num_CaloParticle_Phi_perlayer" + istr1,
385  "Num CaloParticle Phi per Layer Cluster for layer " + istr2,
386  nintPhi_,
387  minPhi_,
388  maxPhi_);
389  histograms.h_numDup_caloparticle_phi_perlayer[ilayer] =
390  ibook.book1D("NumDup_CaloParticle_Phi_perlayer" + istr1,
391  "Num Duplicate CaloParticle Phi per Layer Cluster for layer " + istr2,
392  nintPhi_,
393  minPhi_,
394  maxPhi_);
395  histograms.h_denom_caloparticle_phi_perlayer[ilayer] =
396  ibook.book1D("Denom_CaloParticle_Phi_perlayer" + istr1,
397  "Denom CaloParticle Phi per Layer Cluster for layer " + istr2,
398  nintPhi_,
399  minPhi_,
400  maxPhi_);
401  histograms.h_num_layercl_eta_perlayer[ilayer] =
402  ibook.book1D("Num_LayerCluster_Eta_perlayer" + istr1,
403  "Num LayerCluster Eta per Layer Cluster for layer " + istr2,
404  nintEta_,
405  minEta_,
406  maxEta_);
407  histograms.h_numMerge_layercl_eta_perlayer[ilayer] =
408  ibook.book1D("NumMerge_LayerCluster_Eta_perlayer" + istr1,
409  "Num Merge LayerCluster Eta per Layer Cluster for layer " + istr2,
410  nintEta_,
411  minEta_,
412  maxEta_);
413  histograms.h_denom_layercl_eta_perlayer[ilayer] =
414  ibook.book1D("Denom_LayerCluster_Eta_perlayer" + istr1,
415  "Denom LayerCluster Eta per Layer Cluster for layer " + istr2,
416  nintEta_,
417  minEta_,
418  maxEta_);
419  histograms.h_num_layercl_phi_perlayer[ilayer] =
420  ibook.book1D("Num_LayerCluster_Phi_perlayer" + istr1,
421  "Num LayerCluster Phi per Layer Cluster for layer " + istr2,
422  nintPhi_,
423  minPhi_,
424  maxPhi_);
425  histograms.h_numMerge_layercl_phi_perlayer[ilayer] =
426  ibook.book1D("NumMerge_LayerCluster_Phi_perlayer" + istr1,
427  "Num Merge LayerCluster Phi per Layer Cluster for layer " + istr2,
428  nintPhi_,
429  minPhi_,
430  maxPhi_);
431  histograms.h_denom_layercl_phi_perlayer[ilayer] =
432  ibook.book1D("Denom_LayerCluster_Phi_perlayer" + istr1,
433  "Denom LayerCluster Phi per Layer Cluster for layer " + istr2,
434  nintPhi_,
435  minPhi_,
436  maxPhi_);
437  histograms.h_cellAssociation_perlayer[ilayer] =
438  ibook.book1D("cellAssociation_perlayer" + istr1, "Cell Association for layer " + istr2, 5, -4., 1.);
439  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(2, "TN(purity)");
440  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(3, "FN(ineff.)");
441  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(4, "FP(fake)");
442  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(5, "TP(eff.)");
443  }
444 
445  //---------------------------------------------------------------------------------------------------------------------------
446  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
447  auto istr = std::to_string(*it);
448  histograms.h_clusternum_perthick[(*it)] = ibook.book1D("totclusternum_thick_" + istr,
449  "total number of layer clusters for thickness " + istr,
453  //---
454  histograms.h_cellsenedens_perthick[(*it)] = ibook.book1D("cellsenedens_thick_" + istr,
455  "energy density of cluster cells for thickness " + istr,
459  }
460 
461  //---------------------------------------------------------------------------------------------------------------------------
462  //Not all combination exists but we should keep them all for cross checking reason.
463  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
464  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
465  auto istr1 = std::to_string(*it);
466  auto istr2 = std::to_string(ilayer);
467  while (istr2.size() < 2)
468  istr2.insert(0, "0");
469  auto istr = istr1 + "_" + istr2;
470  //We will make a mapping to the regural layer naming plus z- or z+ for convenience
471  std::string istr3 = "";
472  //First with the -z endcap
473  if (ilayer < layers) {
474  istr3 = std::to_string(ilayer + 1) + " in z- ";
475  } else { //Then for the +z
476  istr3 = std::to_string(ilayer - (layers - 1)) + " in z+ ";
477  }
478  //---
479  histograms.h_cellsnum_perthickperlayer[istr] =
480  ibook.book1D("cellsnum_perthick_perlayer_" + istr,
481  "total number of cells for layer " + istr3 + " for thickness " + istr1,
485  //---
486  histograms.h_distancetoseedcell_perthickperlayer[istr] =
487  ibook.book1D("distancetoseedcell_perthickperlayer_" + istr,
488  "distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
492  //---
493  histograms.h_distancetoseedcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
494  "distancetoseedcell_perthickperlayer_eneweighted_" + istr,
495  "energy weighted distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
499  //---
500  histograms.h_distancetomaxcell_perthickperlayer[istr] =
501  ibook.book1D("distancetomaxcell_perthickperlayer_" + istr,
502  "distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
506  //---
507  histograms.h_distancetomaxcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
508  "distancetomaxcell_perthickperlayer_eneweighted_" + istr,
509  "energy weighted distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
513  //---
514  histograms.h_distancebetseedandmaxcell_perthickperlayer[istr] =
515  ibook.book1D("distancebetseedandmaxcell_perthickperlayer_" + istr,
516  "distance of seed cell to max cell for layer " + istr3 + " for thickness " + istr1,
520  //---
521  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer[istr] = ibook.book2D(
522  "distancebetseedandmaxcellvsclusterenergy_perthickperlayer_" + istr,
523  "distance of seed cell to max cell vs cluster energy for layer " + istr3 + " for thickness " + istr1,
530  }
531  }
532  //---------------------------------------------------------------------------------------------------------------------------
533 }
534 
536  histograms.h_score_multicl2caloparticle.push_back(ibook.book1D(
537  "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_));
538  histograms.h_score_caloparticle2multicl.push_back(ibook.book1D(
539  "Score_caloparticle2multicl", "Score of CaloParticle per Multi Cluster", nintScore_, minScore_, maxScore_));
540  histograms.h_energy_vs_score_multicl2caloparticle.push_back(
541  ibook.book2D("Energy_vs_Score_multi2caloparticle",
542  "Energy vs Score of Multi Cluster per CaloParticle",
543  nintScore_,
544  minScore_,
545  maxScore_,
549  histograms.h_energy_vs_score_caloparticle2multicl.push_back(
550  ibook.book2D("Energy_vs_Score_caloparticle2multi",
551  "Energy vs Score of CaloParticle per Multi Cluster",
552  nintScore_,
553  minScore_,
554  maxScore_,
558 
559  //back to all multiclusters
560  histograms.h_num_multicl_eta.push_back(
561  ibook.book1D("Num_MultiCluster_Eta", "Num MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
562  histograms.h_numMerge_multicl_eta.push_back(ibook.book1D(
563  "NumMerge_MultiCluster_Eta", "Num Merge MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
564  histograms.h_denom_multicl_eta.push_back(
565  ibook.book1D("Denom_MultiCluster_Eta", "Denom MultiCluster Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
566  histograms.h_num_multicl_phi.push_back(
567  ibook.book1D("Num_MultiCluster_Phi", "Num MultiCluster Phi per Multi Cluster ", nintPhi_, minPhi_, maxPhi_));
568  histograms.h_numMerge_multicl_phi.push_back(ibook.book1D(
569  "NumMerge_MultiCluster_Phi", "Num Merge MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
570  histograms.h_denom_multicl_phi.push_back(
571  ibook.book1D("Denom_MultiCluster_Phi", "Denom MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
572  histograms.h_sharedenergy_multicl2caloparticle.push_back(
573  ibook.book1D("SharedEnergy_multicluster2caloparticle",
574  "Shared Energy of Multi Cluster per Calo Particle in each layer",
578  histograms.h_sharedenergy_multicl2caloparticle_vs_eta.push_back(
579  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_eta",
580  "Shared Energy of MultiCluster vs #eta per best Calo Particle in each layer",
581  nintEta_,
582  minEta_,
583  maxEta_,
586  histograms.h_sharedenergy_multicl2caloparticle_vs_phi.push_back(
587  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_phi",
588  "Shared Energy of MultiCluster vs #phi per best Calo Particle in each layer",
589  nintPhi_,
590  minPhi_,
591  maxPhi_,
594  histograms.h_sharedenergy_caloparticle2multicl.push_back(
595  ibook.book1D("SharedEnergy_caloparticle2multicl",
596  "Shared Energy of CaloParticle per Multi Cluster",
600  histograms.h_sharedenergy_caloparticle2multicl_vs_eta.push_back(
601  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_eta",
602  "Shared Energy of CaloParticle vs #eta per best Multi Cluster",
603  nintEta_,
604  minEta_,
605  maxEta_,
608  histograms.h_sharedenergy_caloparticle2multicl_vs_phi.push_back(
609  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_phi",
610  "Shared Energy of CaloParticle vs #phi per best Multi Cluster",
611  nintPhi_,
612  minPhi_,
613  maxPhi_,
616  histograms.h_num_caloparticle_eta.push_back(
617  ibook.book1D("Num_CaloParticle_Eta", "Num CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
618  histograms.h_numDup_multicl_eta.push_back(
619  ibook.book1D("NumDup_MultiCluster_Eta", "Num Duplicate MultiCl vs Eta", nintEta_, minEta_, maxEta_));
620  histograms.h_denom_caloparticle_eta.push_back(
621  ibook.book1D("Denom_CaloParticle_Eta", "Denom CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
622  histograms.h_num_caloparticle_phi.push_back(
623  ibook.book1D("Num_CaloParticle_Phi", "Num CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
624  histograms.h_numDup_multicl_phi.push_back(
625  ibook.book1D("NumDup_MultiCluster_Phi", "Num Duplicate MultiCl vs Phi", nintPhi_, minPhi_, maxPhi_));
626  histograms.h_denom_caloparticle_phi.push_back(
627  ibook.book1D("Denom_CaloParticle_Phi", "Denom CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
628 
629  std::unordered_map<int, dqm::reco::MonitorElement*> clusternum_in_multicluster_perlayer;
630  clusternum_in_multicluster_perlayer.clear();
631 
632  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
633  auto istr1 = std::to_string(ilayer);
634  while (istr1.size() < 2) {
635  istr1.insert(0, "0");
636  }
637  //We will make a mapping to the regural layer naming plus z- or z+ for convenience
638  std::string istr2 = "";
639  //First with the -z endcap
640  if (ilayer < layers) {
641  istr2 = std::to_string(ilayer + 1) + " in z-";
642  } else { //Then for the +z
643  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
644  }
645 
646  clusternum_in_multicluster_perlayer[ilayer] =
647  ibook.book1D("clusternum_in_multicluster_perlayer" + istr1,
648  "Number of layer clusters in multicluster for layer " + istr2,
652  }
653 
654  histograms.h_clusternum_in_multicluster_perlayer.push_back(std::move(clusternum_in_multicluster_perlayer));
655 
656  histograms.h_multiclusternum.push_back(
657  ibook.book1D("totmulticlusternum", "total number of multiclusters", nintTotNMCLs_, minTotNMCLs_, maxTotNMCLs_));
658 
659  histograms.h_contmulticlusternum.push_back(ibook.book1D("contmulticlusternum",
660  "number of multiclusters with 3 contiguous layers",
662  minTotNMCLs_,
663  maxTotNMCLs_));
664 
665  histograms.h_noncontmulticlusternum.push_back(ibook.book1D("noncontmulticlusternum",
666  "number of multiclusters without 3 contiguous layers",
668  minTotNMCLs_,
669  maxTotNMCLs_));
670 
671  histograms.h_clusternum_in_multicluster.push_back(ibook.book1D("clusternum_in_multicluster",
672  "total number of layer clusters in multicluster",
676 
677  histograms.h_clusternum_in_multicluster_vs_layer.push_back(
678  ibook.bookProfile("clusternum_in_multicluster_vs_layer",
679  "Profile of 2d layer clusters in multicluster vs layer number",
680  2 * layers,
681  0.,
682  2. * layers,
685 
686  histograms.h_multiplicityOfLCinMCL.push_back(ibook.book2D("multiplicityOfLCinMCL",
687  "Multiplicity vs Layer cluster size in Multiclusters",
689  minMplofLCs_,
690  maxMplofLCs_,
694 
695  histograms.h_multiplicity_numberOfEventsHistogram.push_back(ibook.book1D("multiplicity_numberOfEventsHistogram",
696  "multiplicity numberOfEventsHistogram",
698  minMplofLCs_,
699  maxMplofLCs_));
700 
701  histograms.h_multiplicity_zminus_numberOfEventsHistogram.push_back(
702  ibook.book1D("multiplicity_zminus_numberOfEventsHistogram",
703  "multiplicity numberOfEventsHistogram in z-",
705  minMplofLCs_,
706  maxMplofLCs_));
707 
708  histograms.h_multiplicity_zplus_numberOfEventsHistogram.push_back(
709  ibook.book1D("multiplicity_zplus_numberOfEventsHistogram",
710  "multiplicity numberOfEventsHistogram in z+",
712  minMplofLCs_,
713  maxMplofLCs_));
714 
715  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus.push_back(
716  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zminus",
717  "Multiplicity vs Layer number in z-",
719  minMplofLCs_,
720  maxMplofLCs_,
721  layers,
722  0.,
723  (float)layers));
724 
725  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus.push_back(
726  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zplus",
727  "Multiplicity vs Layer number in z+",
729  minMplofLCs_,
730  maxMplofLCs_,
731  layers,
732  0.,
733  (float)layers));
734 
735  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy.push_back(
736  ibook.book2D("multiplicityOfLCinMCL_vs_layerclusterenergy",
737  "Multiplicity vs Layer cluster energy",
739  minMplofLCs_,
740  maxMplofLCs_,
744 
745  histograms.h_multicluster_pt.push_back(
746  ibook.book1D("multicluster_pt", "Pt of the multicluster", nintPt_, minPt_, maxPt_));
747  histograms.h_multicluster_eta.push_back(
748  ibook.book1D("multicluster_eta", "Eta of the multicluster", nintEta_, minEta_, maxEta_));
749  histograms.h_multicluster_phi.push_back(
750  ibook.book1D("multicluster_phi", "Phi of the multicluster", nintPhi_, minPhi_, maxPhi_));
751  histograms.h_multicluster_energy.push_back(
752  ibook.book1D("multicluster_energy", "Energy of the multicluster", nintEne_, minEne_, maxEne_));
753  histograms.h_multicluster_x.push_back(
754  ibook.book1D("multicluster_x", "X position of the multicluster", nintX_, minX_, maxX_));
755  histograms.h_multicluster_y.push_back(
756  ibook.book1D("multicluster_y", "Y position of the multicluster", nintY_, minY_, maxY_));
757  histograms.h_multicluster_z.push_back(
758  ibook.book1D("multicluster_z", "Z position of the multicluster", nintZ_, minZ_, maxZ_));
759  histograms.h_multicluster_firstlayer.push_back(
760  ibook.book1D("multicluster_firstlayer", "First layer of the multicluster", 2 * layers, 0., (float)2 * layers));
761  histograms.h_multicluster_lastlayer.push_back(
762  ibook.book1D("multicluster_lastlayer", "Last layer of the multicluster", 2 * layers, 0., (float)2 * layers));
763  histograms.h_multicluster_layersnum.push_back(ibook.book1D(
764  "multicluster_layersnum", "Number of layers of the multicluster", 2 * layers, 0., (float)2 * layers));
765 }
766 
// NOTE(review): the function signature (original line 767) was dropped by the
// listing scrape; presumably this is
//   void HGVHistoProducerAlgo::fill_info_histos(const Histograms& histograms, unsigned layers) const
// — confirm against the header file.
768  //We will save some info straight from geometry to avoid mistakes from updates
769  //----------- TODO ----------------------------------------------------------
770  //For now values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the one expected.
771  //Will come back to this when there will be info in CMSSW to put in DQM file.
// z- side reference values: last EE and last FH layer indices taken straight
// from RecHitTools so downstream consumers of the DQM file can read the
// geometry constants instead of hard-coding them.
772  histograms.lastLayerEEzm->Fill(recHitTools_->lastLayerEE());
773  histograms.lastLayerFHzm->Fill(recHitTools_->lastLayerFH());
774  histograms.maxlayerzm->Fill(layers);
// z+ side: the same quantities shifted by 'layers', matching the convention
// used elsewhere in this file where z- occupies [0, layers) and z+ occupies
// [layers, 2*layers).
775  histograms.lastLayerEEzp->Fill(recHitTools_->lastLayerEE() + layers);
776  histograms.lastLayerFHzp->Fill(recHitTools_->lastLayerFH() + layers);
777  histograms.maxlayerzp->Fill(layers + layers);
778 }
779 
// NOTE(review): the first signature line (original line 780, with the function
// name and the Histograms& parameter) was dropped by the listing scrape;
// presumably this is HGVHistoProducerAlgo::fill_caloparticle_histos — confirm.
//
// Fills the per-PDG-id CaloParticle histograms (eta, eta vs. origin z, energy,
// pt, phi). Every Fill is guarded by a count(pdgid) lookup, so only species
// whose histograms were pre-booked are filled; unknown pdgids are silently
// skipped rather than creating new entries.
781  int pdgid,
782  const CaloParticle& caloparticle,
783  std::vector<SimVertex> const& simVertices) const {
// getEta() — presumably applies |eta| when useFabsEta_ is configured (see the
// useFabsEta parameter in the constructor) — TODO confirm.
784  const auto eta = getEta(caloparticle.eta());
785  if (histograms.h_caloparticle_eta.count(pdgid)) {
786  histograms.h_caloparticle_eta.at(pdgid)->Fill(eta);
787  }
788  if (histograms.h_caloparticle_eta_Zorigin.count(pdgid)) {
// 2D fill: z position of the production vertex of the CaloParticle's first
// G4 track vs. eta.
// NOTE(review): assumes g4Tracks() is non-empty and vertIndex() is a valid
// index into simVertices (the .at() would throw otherwise) — confirm that
// upstream selection guarantees this.
789  histograms.h_caloparticle_eta_Zorigin.at(pdgid)->Fill(
790  simVertices.at(caloparticle.g4Tracks()[0].vertIndex()).position().z(), eta);
791  }
792 
793  if (histograms.h_caloparticle_energy.count(pdgid)) {
794  histograms.h_caloparticle_energy.at(pdgid)->Fill(caloparticle.energy());
795  }
796  if (histograms.h_caloparticle_pt.count(pdgid)) {
797  histograms.h_caloparticle_pt.at(pdgid)->Fill(caloparticle.pt());
798  }
799  if (histograms.h_caloparticle_phi.count(pdgid)) {
800  histograms.h_caloparticle_phi.at(pdgid)->Fill(caloparticle.phi());
801  }
802 }
803 
// NOTE(review): the first signature line (original line 804, with the function
// name and the Histograms& parameter) was dropped by the listing scrape;
// presumably this is HGVHistoProducerAlgo::fill_cluster_histos — confirm.
//
// Fills the eta histogram of the layer-cluster collection indexed by 'count'.
// NOTE(review): h_cluster_eta is indexed with operator[] (no .at() / count()
// guard as in the CaloParticle filler above), so 'count' must correspond to a
// booked collection — confirm callers guarantee this.
805  int count,
806  const reco::CaloCluster& cluster) const {
// getEta() — presumably folds in the useFabsEta_ configuration — TODO confirm.
807  const auto eta = getEta(cluster.eta());
808  histograms.h_cluster_eta[count]->Fill(eta);
809 }
810 
813  std::vector<CaloParticle> const& cP,
814  std::vector<size_t> const& cPIndices,
815  std::vector<size_t> const& cPSelectedIndices,
816  std::map<DetId, const HGCRecHit*> const& hitMap,
817  unsigned layers) const {
818  auto nLayerClusters = clusters.size();
819  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
820  auto nCaloParticles = cPIndices.size();
821 
822  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
823  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToLayerClusterId_Map;
824 
825  // this contains the ids of the caloparticles contributing with at least one hit to the layer cluster and the reconstruction error
826  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInLayerCluster;
827  cpsInLayerCluster.resize(nLayerClusters);
828 
829  std::unordered_map<int, std::vector<caloParticleOnLayer>> cPOnLayer;
830  // Initialization of cPOnLayer
831  for (unsigned int i = 0; i < nCaloParticles; ++i) {
832  auto cpIndex = cPIndices[i];
833  cPOnLayer[cpIndex].resize(layers * 2);
834  for (unsigned int j = 0; j < layers * 2; ++j) {
835  cPOnLayer[cpIndex][j].caloParticleId = cpIndex;
836  cPOnLayer[cpIndex][j].energy = 0.f;
837  cPOnLayer[cpIndex][j].hits_and_fractions.clear();
838  }
839  }
840 
841  // The association has to be done in an all-vs-all fashion.
842  // For this reason we use the full set of caloParticles, with the only filter on bx
843  for (const auto& cpId : cPIndices) {
844  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
845  for (const auto& it_sc : simClusterRefVector) {
846  const SimCluster& simCluster = (*(it_sc));
847  const auto& hits_and_fractions = simCluster.hits_and_fractions();
848  for (const auto& it_haf : hits_and_fractions) {
849  DetId hitid = (it_haf.first);
850  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
851  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
852  if (itcheck != hitMap.end()) {
853  const HGCRecHit* hit = itcheck->second;
854  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
855  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
856  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
857  detIdToCaloParticleId_Map[hitid].emplace_back(
858  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
859  } else {
860  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
861  detIdToCaloParticleId_Map[hitid].end(),
862  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
863  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
864  findHitIt->fraction += it_haf.second;
865  } else {
866  detIdToCaloParticleId_Map[hitid].emplace_back(
867  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
868  }
869  }
870  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
871  // We need to compress the hits and fractions in order to have a
872  // reasonable score between CP and LC. Imagine, for example, that a
873  // CP has detID X used by 2 SimClusters with different fractions. If
874  // a single LC uses X with fraction 1 and is compared to the 2
875  // contributions separately, it will be assigned a score != 0, which
876  // is wrong.
877  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
878  auto found = std::find_if(
879  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
880  if (found != haf.end()) {
881  found->second += it_haf.second;
882  } else {
883  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
884  }
885  }
886  }
887  }
888  }
889 
890  LogDebug("HGCalValidator") << "cPOnLayer INFO" << std::endl;
891  for (size_t cp = 0; cp < cPOnLayer.size(); ++cp) {
892  LogDebug("HGCalValidator") << "For CaloParticle Idx: " << cp << " we have: " << std::endl;
893  for (size_t cpp = 0; cpp < cPOnLayer[cp].size(); ++cpp) {
894  LogDebug("HGCalValidator") << " On Layer: " << cpp << " we have:" << std::endl;
895  LogDebug("HGCalValidator") << " CaloParticleIdx: " << cPOnLayer[cp][cpp].caloParticleId << std::endl;
896  LogDebug("HGCalValidator") << " Energy: " << cPOnLayer[cp][cpp].energy << std::endl;
897  double tot_energy = 0.;
898  for (auto const& haf : cPOnLayer[cp][cpp].hits_and_fractions) {
899  LogDebug("HGCalValidator") << " Hits/fraction/energy: " << (uint32_t)haf.first << "/" << haf.second << "/"
900  << haf.second * hitMap.at(haf.first)->energy() << std::endl;
901  tot_energy += haf.second * hitMap.at(haf.first)->energy();
902  }
903  LogDebug("HGCalValidator") << " Tot Sum haf: " << tot_energy << std::endl;
904  for (auto const& lc : cPOnLayer[cp][cpp].layerClusterIdToEnergyAndScore) {
905  LogDebug("HGCalValidator") << " lcIdx/energy/score: " << lc.first << "/" << lc.second.first << "/"
906  << lc.second.second << std::endl;
907  }
908  }
909  }
910 
911  LogDebug("HGCalValidator") << "detIdToCaloParticleId_Map INFO" << std::endl;
912  for (auto const& cp : detIdToCaloParticleId_Map) {
913  LogDebug("HGCalValidator") << "For detId: " << (uint32_t)cp.first
914  << " we have found the following connections with CaloParticles:" << std::endl;
915  for (auto const& cpp : cp.second) {
916  LogDebug("HGCalValidator") << " CaloParticle Id: " << cpp.clusterId << " with fraction: " << cpp.fraction
917  << " and energy: " << cpp.fraction * hitMap.at(cp.first)->energy() << std::endl;
918  }
919  }
920 
921  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
922  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
923  unsigned int numberOfHitsInLC = hits_and_fractions.size();
924 
925  // This vector will store, for each hit in the Layercluster, the index of
926  // the CaloParticle that contributed the most, in terms of energy, to it.
927  // Special values are:
928  //
929  // -2 --> the reconstruction fraction of the RecHit is 0 (used in the past to monitor Halo Hits)
930  // -3 --> same as before with the added condition that no CaloParticle has been linked to this RecHit
931  // -1 --> the reco fraction is >0, but no CaloParticle has been linked to it
932  // >=0 --> index of the linked CaloParticle
933  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
934  const auto firstHitDetId = hits_and_fractions[0].first;
935  int lcLayerId =
936  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
937 
938  // This will store the index of the CaloParticle linked to the LayerCluster that has the most number of hits in common.
939  int maxCPId_byNumberOfHits = -1;
940  // This will store the maximum number of shared hits between a Layercluster andd a CaloParticle
941  unsigned int maxCPNumberOfHitsInLC = 0;
942  // This will store the index of the CaloParticle linked to the LayerCluster that has the most energy in common.
943  //
944  int maxCPId_byEnergy = -1;
945  // This will store the maximum number of shared energy between a Layercluster and a CaloParticle
946  float maxEnergySharedLCandCP = 0.f;
947  // This will store the fraction of the LayerCluster energy shared with the best(energy) CaloParticle: e_shared/lc_energy
948  float energyFractionOfLCinCP = 0.f;
949  // This will store the fraction of the CaloParticle energy shared with the LayerCluster: e_shared/cp_energy
950  float energyFractionOfCPinLC = 0.f;
951  std::unordered_map<unsigned, unsigned> occurrencesCPinLC;
952  std::unordered_map<unsigned, float> CPEnergyInLC;
953  unsigned int numberOfNoiseHitsInLC = 0;
954  unsigned int numberOfHaloHitsInLC = 0;
955 
956  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
957  DetId rh_detid = hits_and_fractions[hitId].first;
958  auto rhFraction = hits_and_fractions[hitId].second;
959 
960  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
961  const HGCRecHit* hit = itcheck->second;
962 
963  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(rh_detid);
964  if (hit_find_in_LC == detIdToLayerClusterId_Map.end()) {
965  detIdToLayerClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
966  }
967  detIdToLayerClusterId_Map[rh_detid].emplace_back(HGVHistoProducerAlgo::detIdInfoInCluster{lcId, rhFraction});
968 
969  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
970 
971  // if the fraction is zero or the hit does not belong to any calo
972  // particle, set the caloparticleId for the hit to -1 this will
973  // contribute to the number of noise hits
974 
975  // MR Remove the case in which the fraction is 0, since this could be a
976  // real hit that has been marked as halo.
977  if (rhFraction == 0.) {
978  hitsToCaloParticleId[hitId] = -2;
979  numberOfHaloHitsInLC++;
980  }
981  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
982  hitsToCaloParticleId[hitId] -= 1;
983  } else {
984  auto maxCPEnergyInLC = 0.f;
985  auto maxCPId = -1;
986  for (auto& h : hit_find_in_CP->second) {
987  CPEnergyInLC[h.clusterId] += h.fraction * hit->energy();
988  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[lcId].first += h.fraction * hit->energy();
989  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[lcId].second = FLT_MAX;
990  cpsInLayerCluster[lcId].emplace_back(std::make_pair<int, float>(h.clusterId, FLT_MAX));
991  // Keep track of which CaloParticle ccontributed the most, in terms
992  // of energy, to this specific LayerCluster.
993  if (CPEnergyInLC[h.clusterId] > maxCPEnergyInLC) {
994  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
995  maxCPId = h.clusterId;
996  }
997  }
998  hitsToCaloParticleId[hitId] = maxCPId;
999  }
1000  histograms.h_cellAssociation_perlayer.at(lcLayerId)->Fill(
1001  hitsToCaloParticleId[hitId] > 0. ? 0. : hitsToCaloParticleId[hitId]);
1002  } // End loop over hits on a LayerCluster
1003 
1004  for (auto& c : hitsToCaloParticleId) {
1005  if (c < 0) {
1006  numberOfNoiseHitsInLC++;
1007  } else {
1008  occurrencesCPinLC[c]++;
1009  }
1010  }
1011 
1012  for (auto& c : occurrencesCPinLC) {
1013  if (c.second > maxCPNumberOfHitsInLC) {
1014  maxCPId_byNumberOfHits = c.first;
1015  maxCPNumberOfHitsInLC = c.second;
1016  }
1017  }
1018 
1019  for (auto& c : CPEnergyInLC) {
1020  if (c.second > maxEnergySharedLCandCP) {
1021  maxCPId_byEnergy = c.first;
1022  maxEnergySharedLCandCP = c.second;
1023  }
1024  }
1025  float totalCPEnergyOnLayer = 0.f;
1026  if (maxCPId_byEnergy >= 0) {
1027  totalCPEnergyOnLayer = cPOnLayer[maxCPId_byEnergy][lcLayerId].energy;
1028  energyFractionOfCPinLC = maxEnergySharedLCandCP / totalCPEnergyOnLayer;
1029  if (clusters[lcId].energy() > 0.f) {
1030  energyFractionOfLCinCP = maxEnergySharedLCandCP / clusters[lcId].energy();
1031  }
1032  }
1033  LogDebug("HGCalValidator") << std::setw(10) << "LayerId:"
1034  << "\t" << std::setw(12) << "layerCluster"
1035  << "\t" << std::setw(10) << "lc energy"
1036  << "\t" << std::setw(5) << "nhits"
1037  << "\t" << std::setw(12) << "noise hits"
1038  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
1039  << "\t" << std::setw(8) << "nhitsCP"
1040  << "\t" << std::setw(13) << "maxCPId_byEnergy"
1041  << "\t" << std::setw(20) << "maxEnergySharedLCandCP"
1042  << "\t" << std::setw(22) << "totalCPEnergyOnLayer"
1043  << "\t" << std::setw(22) << "energyFractionOfLCinCP"
1044  << "\t" << std::setw(25) << "energyFractionOfCPinLC"
1045  << "\t"
1046  << "\n";
1047  LogDebug("HGCalValidator") << std::setw(10) << lcLayerId << "\t" << std::setw(12) << lcId << "\t" << std::setw(10)
1048  << clusters[lcId].energy() << "\t" << std::setw(5) << numberOfHitsInLC << "\t"
1049  << std::setw(12) << numberOfNoiseHitsInLC << "\t" << std::setw(22)
1050  << maxCPId_byNumberOfHits << "\t" << std::setw(8) << maxCPNumberOfHitsInLC << "\t"
1051  << std::setw(13) << maxCPId_byEnergy << "\t" << std::setw(20) << maxEnergySharedLCandCP
1052  << "\t" << std::setw(22) << totalCPEnergyOnLayer << "\t" << std::setw(22)
1053  << energyFractionOfLCinCP << "\t" << std::setw(25) << energyFractionOfCPinLC << "\n";
1054  } // End of loop over LayerClusters
1055 
1056  LogDebug("HGCalValidator") << "Improved cPOnLayer INFO" << std::endl;
1057  for (size_t cp = 0; cp < cPOnLayer.size(); ++cp) {
1058  LogDebug("HGCalValidator") << "For CaloParticle Idx: " << cp << " we have: " << std::endl;
1059  for (size_t cpp = 0; cpp < cPOnLayer[cp].size(); ++cpp) {
1060  LogDebug("HGCalValidator") << " On Layer: " << cpp << " we have:" << std::endl;
1061  LogDebug("HGCalValidator") << " CaloParticleIdx: " << cPOnLayer[cp][cpp].caloParticleId << std::endl;
1062  LogDebug("HGCalValidator") << " Energy: " << cPOnLayer[cp][cpp].energy << std::endl;
1063  double tot_energy = 0.;
1064  for (auto const& haf : cPOnLayer[cp][cpp].hits_and_fractions) {
1065  LogDebug("HGCalValidator") << " Hits/fraction/energy: " << (uint32_t)haf.first << "/" << haf.second << "/"
1066  << haf.second * hitMap.at(haf.first)->energy() << std::endl;
1067  tot_energy += haf.second * hitMap.at(haf.first)->energy();
1068  }
1069  LogDebug("HGCalValidator") << " Tot Sum haf: " << tot_energy << std::endl;
1070  for (auto const& lc : cPOnLayer[cp][cpp].layerClusterIdToEnergyAndScore) {
1071  LogDebug("HGCalValidator") << " lcIdx/energy/score: " << lc.first << "/" << lc.second.first << "/"
1072  << lc.second.second << std::endl;
1073  }
1074  }
1075  }
1076 
1077  LogDebug("HGCalValidator") << "Improved detIdToCaloParticleId_Map INFO" << std::endl;
1078  for (auto const& cp : detIdToCaloParticleId_Map) {
1079  LogDebug("HGCalValidator") << "For detId: " << (uint32_t)cp.first
1080  << " we have found the following connections with CaloParticles:" << std::endl;
1081  for (auto const& cpp : cp.second) {
1082  LogDebug("HGCalValidator") << " CaloParticle Id: " << cpp.clusterId << " with fraction: " << cpp.fraction
1083  << " and energy: " << cpp.fraction * hitMap.at(cp.first)->energy() << std::endl;
1084  }
1085  }
1086 
1087  // Here we do fill the plots to compute the different metrics linked to
1088  // reco-level, namely fake-rate an merge-rate. In this loop we should *not*
1089  // restrict only to the selected caloParaticles.
1090  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1091  // find the unique caloparticles id contributing to the layer clusters
1092  std::sort(cpsInLayerCluster[lcId].begin(), cpsInLayerCluster[lcId].end());
1093  auto last = std::unique(cpsInLayerCluster[lcId].begin(), cpsInLayerCluster[lcId].end());
1094  cpsInLayerCluster[lcId].erase(last, cpsInLayerCluster[lcId].end());
1095  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1096  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1097  auto firstHitDetId = hits_and_fractions[0].first;
1098  int lcLayerId =
1099  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1100  // If a reconstructed LayerCluster has energy 0 but is linked to a CaloParticle, assigned score 1
1101  if (clusters[lcId].energy() == 0. && !cpsInLayerCluster[lcId].empty()) {
1102  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1103  cpPair.second = 1.;
1104  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first << "\t score \t"
1105  << cpPair.second << "\n";
1106  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1107  }
1108  continue;
1109  }
1110 
1111  // Compute the correct normalization
1112  float invLayerClusterEnergyWeight = 0.f;
1113  for (auto const& haf : clusters[lcId].hitsAndFractions()) {
1114  invLayerClusterEnergyWeight +=
1115  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1116  }
1117  invLayerClusterEnergyWeight = 1.f / invLayerClusterEnergyWeight;
1118 
1119  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
1120  DetId rh_detid = hits_and_fractions[i].first;
1121  float rhFraction = hits_and_fractions[i].second;
1122  bool hitWithNoCP = false;
1123 
1124  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1125  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
1126  hitWithNoCP = true;
1127  auto itcheck = hitMap.find(rh_detid);
1128  const HGCRecHit* hit = itcheck->second;
1129  float hitEnergyWeight = hit->energy() * hit->energy();
1130 
1131  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1132  float cpFraction = 0.f;
1133  if (!hitWithNoCP) {
1134  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
1135  detIdToCaloParticleId_Map[rh_detid].end(),
1136  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
1137  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end())
1138  cpFraction = findHitIt->fraction;
1139  }
1140  if (cpPair.second == FLT_MAX) {
1141  cpPair.second = 0.f;
1142  }
1143  cpPair.second +=
1144  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invLayerClusterEnergyWeight;
1145  }
1146  } // End of loop over Hits within a LayerCluster
1147 
1148  if (cpsInLayerCluster[lcId].empty())
1149  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\tCP id:\t-1 "
1150  << "\t score \t-1"
1151  << "\n";
1152 
1153  for (auto& cpPair : cpsInLayerCluster[lcId]) {
1154  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first << "\t score \t"
1155  << cpPair.second << "\n";
1156  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1157  auto const& cp_linked = cPOnLayer[cpPair.first][lcLayerId].layerClusterIdToEnergyAndScore[lcId];
1158  histograms.h_sharedenergy_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1159  cp_linked.first / clusters[lcId].energy(), clusters[lcId].energy());
1160  histograms.h_energy_vs_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1161  cpPair.second, cp_linked.first / clusters[lcId].energy());
1162  }
1163 
1164  auto assoc = std::count_if(std::begin(cpsInLayerCluster[lcId]),
1165  std::end(cpsInLayerCluster[lcId]),
1166  [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
1167  if (assoc) {
1168  histograms.h_num_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1169  histograms.h_num_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1170  if (assoc > 1) {
1171  histograms.h_numMerge_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1172  histograms.h_numMerge_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1173  }
1174  auto best = std::min_element(std::begin(cpsInLayerCluster[lcId]),
1175  std::end(cpsInLayerCluster[lcId]),
1176  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1177  auto const& best_cp_linked = cPOnLayer[best->first][lcLayerId].layerClusterIdToEnergyAndScore[lcId];
1178  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer.at(lcLayerId)->Fill(
1179  clusters[lcId].eta(), best_cp_linked.first / clusters[lcId].energy());
1180  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer.at(lcLayerId)->Fill(
1181  clusters[lcId].phi(), best_cp_linked.first / clusters[lcId].energy());
1182  }
1183  histograms.h_denom_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1184  histograms.h_denom_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1185  } // End of loop over LayerClusters
1186 
1187  // Here we do fill the plots to compute the different metrics linked to
1188  // gen-level, namely efficiency an duplicate. In this loop we should restrict
1189  // only to the selected caloParaticles.
1190  for (const auto& cpId : cPSelectedIndices) {
1191  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1192  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
1193  float CPenergy = cPOnLayer[cpId][layerId].energy;
1194  if (CPNumberOfHits == 0)
1195  continue;
1196  int lcWithMaxEnergyInCP = -1;
1197  float maxEnergyLCinCP = 0.f;
1198  float CPEnergyFractionInLC = 0.f;
1199  for (auto& lc : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1200  if (lc.second.first > maxEnergyLCinCP) {
1201  maxEnergyLCinCP = lc.second.first;
1202  lcWithMaxEnergyInCP = lc.first;
1203  }
1204  }
1205  if (CPenergy > 0.f)
1206  CPEnergyFractionInLC = maxEnergyLCinCP / CPenergy;
1207 
1208  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
1209  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
1210  << "CPNhitsOnLayer\t" << std::setw(18) << "lcWithMaxEnergyInCP\t" << std::setw(15)
1211  << "maxEnergyLCinCP\t" << std::setw(20) << "CPEnergyFractionInLC"
1212  << "\n";
1213  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
1214  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
1215  << CPNumberOfHits << "\t" << std::setw(18) << lcWithMaxEnergyInCP << "\t"
1216  << std::setw(15) << maxEnergyLCinCP << "\t" << std::setw(20) << CPEnergyFractionInLC
1217  << "\n";
1218 
1219  // Compute the correct normalization
1220  float invCPEnergyWeight = 0.f;
1221  for (auto const& haf : cPOnLayer[cpId][layerId].hits_and_fractions) {
1222  invCPEnergyWeight +=
1223  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1224  }
1225  invCPEnergyWeight = 1.f / invCPEnergyWeight;
1226 
1227  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
1228  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
1229  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
1230 
1231  bool hitWithNoLC = false;
1232  if (cpFraction == 0.f)
1233  continue; //hopefully this should never happen
1234  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(cp_hitDetId);
1235  if (hit_find_in_LC == detIdToLayerClusterId_Map.end())
1236  hitWithNoLC = true;
1237  auto itcheck = hitMap.find(cp_hitDetId);
1238  const HGCRecHit* hit = itcheck->second;
1239  float hitEnergyWeight = hit->energy() * hit->energy();
1240  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1241  unsigned int layerClusterId = lcPair.first;
1242  float lcFraction = 0.f;
1243 
1244  if (!hitWithNoLC) {
1245  auto findHitIt = std::find(detIdToLayerClusterId_Map[cp_hitDetId].begin(),
1246  detIdToLayerClusterId_Map[cp_hitDetId].end(),
1247  HGVHistoProducerAlgo::detIdInfoInCluster{layerClusterId, 0.f});
1248  if (findHitIt != detIdToLayerClusterId_Map[cp_hitDetId].end())
1249  lcFraction = findHitIt->fraction;
1250  }
1251  // if (lcFraction == 0.) {
1252  // lcFraction = -1.;
1253  // }
1254  if (lcPair.second.second == FLT_MAX) {
1255  lcPair.second.second = 0.f;
1256  }
1257  lcPair.second.second +=
1258  (lcFraction - cpFraction) * (lcFraction - cpFraction) * hitEnergyWeight * invCPEnergyWeight;
1259  LogDebug("HGCalValidator") << "cpDetId:\t" << (uint32_t)cp_hitDetId << "\tlayerClusterId:\t" << layerClusterId
1260  << "\t"
1261  << "lcfraction,cpfraction:\t" << lcFraction << ", " << cpFraction << "\t"
1262  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
1263  << "current score:\t" << lcPair.second.second << "\t"
1264  << "invCPEnergyWeight:\t" << invCPEnergyWeight << "\n";
1265  } // End of loop over LayerClusters linked to hits of this CaloParticle
1266  } // End of loop over hits of CaloParticle on a Layer
1267 
1268  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
1269  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\tLC id:\t-1 "
1270  << "\t score \t-1"
1271  << "\n";
1272 
1273  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1274  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t LC id: \t" << lcPair.first << "\t score \t"
1275  << lcPair.second.second << "\t"
1276  << "shared energy:\t" << lcPair.second.first << "\t"
1277  << "shared energy fraction:\t" << (lcPair.second.first / CPenergy) << "\n";
1278  histograms.h_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second);
1279  histograms.h_sharedenergy_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.first / CPenergy,
1280  CPenergy);
1281  histograms.h_energy_vs_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second,
1282  lcPair.second.first / CPenergy);
1283  }
1284  auto assoc = std::count_if(std::begin(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1285  std::end(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1286  [](const auto& obj) { return obj.second.second < ScoreCutCPtoLC_; });
1287  if (assoc) {
1288  histograms.h_num_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1289  histograms.h_num_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1290  if (assoc > 1) {
1291  histograms.h_numDup_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1292  histograms.h_numDup_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1293  }
1294  auto best = std::min_element(
1295  std::begin(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1296  std::end(cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore),
1297  [](const auto& obj1, const auto& obj2) { return obj1.second.second < obj2.second.second; });
1298  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer.at(layerId)->Fill(
1299  cP[cpId].g4Tracks()[0].momentum().eta(), best->second.first / CPenergy);
1300  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer.at(layerId)->Fill(
1301  cP[cpId].g4Tracks()[0].momentum().phi(), best->second.first / CPenergy);
1302  }
1303  histograms.h_denom_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1304  histograms.h_denom_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1305  }
1306  }
1307 }
1308 
1310  int count,
1312  const Density& densities,
1313  std::vector<CaloParticle> const& cP,
1314  std::vector<size_t> const& cPIndices,
1315  std::vector<size_t> const& cPSelectedIndices,
1316  std::map<DetId, const HGCRecHit*> const& hitMap,
1317  std::map<double, double> cummatbudg,
1318  unsigned layers,
1319  std::vector<int> thicknesses) const {
1320  //Each event to be treated as two events: an event in +ve endcap,
1321  //plus another event in -ve endcap. In this spirit there will be
1322  //a layer variable (layerid) that maps the layers in :
1323  //-z: 0->51
1324  //+z: 52->103
1325 
1326  //To keep track of total num of layer clusters per layer
1327  //tnlcpl[layerid]
1328  std::vector<int> tnlcpl(1000, 0); //tnlcpl.clear(); tnlcpl.reserve(1000);
1329 
1330  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1331  std::map<std::string, int> tnlcpthplus;
1332  tnlcpthplus.clear();
1333  std::map<std::string, int> tnlcpthminus;
1334  tnlcpthminus.clear();
1335  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1336  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1337  tnlcpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1338  tnlcpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1339  }
1340  //To keep track of the total num of clusters with mixed thickness hits per event
1341  tnlcpthplus.insert(std::pair<std::string, int>("mixed", 0));
1342  tnlcpthminus.insert(std::pair<std::string, int>("mixed", 0));
1343 
1344  layerClusters_to_CaloParticles(histograms, clusters, cP, cPIndices, cPSelectedIndices, hitMap, layers);
1345 
1346  //To find out the total amount of energy clustered per layer
1347  //Initialize with zeros because I see clear gives weird numbers.
1348  std::vector<double> tecpl(1000, 0.0); //tecpl.clear(); tecpl.reserve(1000);
1349  //for the longitudinal depth barycenter
1350  std::vector<double> ldbar(1000, 0.0); //ldbar.clear(); ldbar.reserve(1000);
1351 
1352  //We need to compare with the total amount of energy coming from caloparticles
1353  double caloparteneplus = 0.;
1354  double caloparteneminus = 0.;
1355  for (const auto& cpId : cPIndices) {
1356  if (cP[cpId].eta() >= 0.) {
1357  caloparteneplus = caloparteneplus + cP[cpId].energy();
1358  }
1359  if (cP[cpId].eta() < 0.) {
1360  caloparteneminus = caloparteneminus + cP[cpId].energy();
1361  }
1362  }
1363 
1364  //loop through clusters of the event
1365  for (unsigned int layerclusterIndex = 0; layerclusterIndex < clusters.size(); layerclusterIndex++) {
1366  const std::vector<std::pair<DetId, float>> hits_and_fractions = clusters[layerclusterIndex].hitsAndFractions();
1367 
1368  const DetId seedid = clusters[layerclusterIndex].seed();
1369  const double seedx = recHitTools_->getPosition(seedid).x();
1370  const double seedy = recHitTools_->getPosition(seedid).y();
1371  DetId maxid = findmaxhit(clusters[layerclusterIndex], hitMap);
1372 
1373  // const DetId maxid = clusters[layerclusterIndex].max();
1374  double maxx = recHitTools_->getPosition(maxid).x();
1375  double maxy = recHitTools_->getPosition(maxid).y();
1376 
1377  //Auxillary variables to count the number of different kind of hits in each cluster
1378  int nthhits120p = 0;
1379  int nthhits200p = 0;
1380  int nthhits300p = 0;
1381  int nthhitsscintp = 0;
1382  int nthhits120m = 0;
1383  int nthhits200m = 0;
1384  int nthhits300m = 0;
1385  int nthhitsscintm = 0;
1386  //For the hits thickness of the layer cluster.
1387  double thickness = 0.;
1388  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
1389  int layerid = 0;
1390  //We will need another layer variable for the longitudinal material budget file reading.
1391  //In this case we need no distinction between -z and +z.
1392  int lay = 0;
1393  //We will need here to save the combination thick_lay
1394  std::string istr = "";
1395  //boolean to check for the layer that the cluster belongs to. Maybe later will check all the layer hits.
1396  bool cluslay = true;
1397  //zside that the current cluster belongs to.
1398  int zside = 0;
1399 
1400  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
1401  it_haf != hits_and_fractions.end();
1402  ++it_haf) {
1403  const DetId rh_detid = it_haf->first;
1404  //The layer that the current hit belongs to
1405  layerid = recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
1406  lay = recHitTools_->getLayerWithOffset(rh_detid);
1407  zside = recHitTools_->zside(rh_detid);
1408  if (rh_detid.det() == DetId::Forward || rh_detid.det() == DetId::HGCalEE || rh_detid.det() == DetId::HGCalHSi) {
1409  thickness = recHitTools_->getSiThickness(rh_detid);
1410  } else if (rh_detid.det() == DetId::HGCalHSc) {
1411  thickness = -1;
1412  } else {
1413  LogDebug("HGCalValidator") << "These are HGCal layer clusters, you shouldn't be here !!! " << layerid << "\n";
1414  continue;
1415  }
1416 
1417  //Count here only once the layer cluster and save the combination thick_layerid
1418  std::string curistr = std::to_string((int)thickness);
1419  std::string lay_string = std::to_string(layerid);
1420  while (lay_string.size() < 2)
1421  lay_string.insert(0, "0");
1422  curistr += "_" + lay_string;
1423  if (cluslay) {
1424  tnlcpl[layerid]++;
1425  istr = curistr;
1426  cluslay = false;
1427  }
1428 
1429  if ((thickness == 120.) && (recHitTools_->zside(rh_detid) > 0.)) {
1430  nthhits120p++;
1431  } else if ((thickness == 120.) && (recHitTools_->zside(rh_detid) < 0.)) {
1432  nthhits120m++;
1433  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) > 0.)) {
1434  nthhits200p++;
1435  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) < 0.)) {
1436  nthhits200m++;
1437  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) > 0.)) {
1438  nthhits300p++;
1439  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) < 0.)) {
1440  nthhits300m++;
1441  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) > 0.)) {
1442  nthhitsscintp++;
1443  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) < 0.)) {
1444  nthhitsscintm++;
1445  } else { //assert(0);
1446  LogDebug("HGCalValidator")
1447  << " You are running a geometry that contains thicknesses different than the normal ones. "
1448  << "\n";
1449  }
1450 
1451  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1452  if (itcheck == hitMap.end()) {
1453  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a hit " << rh_detid.rawId() << " "
1454  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1455  continue;
1456  }
1457 
1458  const HGCRecHit* hit = itcheck->second;
1459 
1460  //Here for the per cell plots
1461  //----
1462  const double hit_x = recHitTools_->getPosition(rh_detid).x();
1463  const double hit_y = recHitTools_->getPosition(rh_detid).y();
1464  double distancetoseed = distance(seedx, seedy, hit_x, hit_y);
1465  double distancetomax = distance(maxx, maxy, hit_x, hit_y);
1466  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer.count(curistr)) {
1467  histograms.h_distancetoseedcell_perthickperlayer.at(curistr)->Fill(distancetoseed);
1468  }
1469  //----
1470  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer_eneweighted.count(curistr)) {
1471  histograms.h_distancetoseedcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetoseed, hit->energy());
1472  }
1473  //----
1474  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer.count(curistr)) {
1475  histograms.h_distancetomaxcell_perthickperlayer.at(curistr)->Fill(distancetomax);
1476  }
1477  //----
1478  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer_eneweighted.count(curistr)) {
1479  histograms.h_distancetomaxcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetomax, hit->energy());
1480  }
1481 
1482  //Let's check the density
1483  std::map<DetId, float>::const_iterator dit = densities.find(rh_detid);
1484  if (dit == densities.end()) {
1485  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a density " << rh_detid.rawId() << " "
1486  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1487  continue;
1488  }
1489 
1490  if (histograms.h_cellsenedens_perthick.count((int)thickness)) {
1491  histograms.h_cellsenedens_perthick.at((int)thickness)->Fill(dit->second);
1492  }
1493 
1494  } // end of loop through hits and fractions
1495 
1496  //Check for simultaneously having hits of different kind. Checking at least two combinations is sufficient.
1497  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
1498  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
1499  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
1500  tnlcpthplus["mixed"]++;
1501  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
1502  //This is a cluster with hits of one kind
1503  tnlcpthplus[std::to_string((int)thickness)]++;
1504  }
1505  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
1506  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
1507  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
1508  tnlcpthminus["mixed"]++;
1509  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
1510  //This is a cluster with hits of one kind
1511  tnlcpthminus[std::to_string((int)thickness)]++;
1512  }
1513 
1514  //To find the thickness with the biggest amount of cells
1515  std::vector<int> bigamoth;
1516  bigamoth.clear();
1517  if (zside > 0) {
1518  bigamoth.push_back(nthhits120p);
1519  bigamoth.push_back(nthhits200p);
1520  bigamoth.push_back(nthhits300p);
1521  bigamoth.push_back(nthhitsscintp);
1522  }
1523  if (zside < 0) {
1524  bigamoth.push_back(nthhits120m);
1525  bigamoth.push_back(nthhits200m);
1526  bigamoth.push_back(nthhits300m);
1527  bigamoth.push_back(nthhitsscintm);
1528  }
1529  auto bgth = std::max_element(bigamoth.begin(), bigamoth.end());
1530  istr = std::to_string(thicknesses[std::distance(bigamoth.begin(), bgth)]);
1531  std::string lay_string = std::to_string(layerid);
1532  while (lay_string.size() < 2)
1533  lay_string.insert(0, "0");
1534  istr += "_" + lay_string;
1535 
1536  //Here for the per cluster plots that need the thickness_layer info
1537  if (histograms.h_cellsnum_perthickperlayer.count(istr)) {
1538  histograms.h_cellsnum_perthickperlayer.at(istr)->Fill(hits_and_fractions.size());
1539  }
1540 
1541  //Now, with the distance between seed and max cell.
1542  double distancebetseedandmax = distance(seedx, seedy, maxx, maxy);
1543  //The thickness_layer combination in this case will use the thickness of the seed as a convention.
1544  std::string seedstr = std::to_string((int)recHitTools_->getSiThickness(seedid)) + "_" + std::to_string(layerid);
1545  seedstr += "_" + lay_string;
1546  if (histograms.h_distancebetseedandmaxcell_perthickperlayer.count(seedstr)) {
1547  histograms.h_distancebetseedandmaxcell_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax);
1548  }
1549  if (histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.count(seedstr)) {
1550  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.at(seedstr)->Fill(
1551  distancebetseedandmax, clusters[layerclusterIndex].energy());
1552  }
1553 
1554  //Energy clustered per layer
1555  tecpl[layerid] = tecpl[layerid] + clusters[layerclusterIndex].energy();
1556  ldbar[layerid] = ldbar[layerid] + clusters[layerclusterIndex].energy() * cummatbudg[(double)lay];
1557 
1558  } //end of loop through clusters of the event
1559 
1560  //After the end of the event we can now fill with the results.
1561  //First a couple of variables to keep the sum of the energy of all clusters
1562  double sumeneallcluspl = 0.;
1563  double sumeneallclusmi = 0.;
1564  //And the longitudinal variable
1565  double sumldbarpl = 0.;
1566  double sumldbarmi = 0.;
1567  //Per layer : Loop 0->103
1568  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
1569  if (histograms.h_clusternum_perlayer.count(ilayer)) {
1570  histograms.h_clusternum_perlayer.at(ilayer)->Fill(tnlcpl[ilayer]);
1571  }
1572  // Two times one for plus and one for minus
1573  //First with the -z endcap
1574  if (ilayer < layers) {
1575  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1576  if (caloparteneminus != 0.) {
1577  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneminus);
1578  }
1579  }
1580  //Keep here the total energy for the event in -z
1581  sumeneallclusmi = sumeneallclusmi + tecpl[ilayer];
1582  //And for the longitudinal variable
1583  sumldbarmi = sumldbarmi + ldbar[ilayer];
1584  } else { //Then for the +z
1585  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1586  if (caloparteneplus != 0.) {
1587  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneplus);
1588  }
1589  }
1590  //Keep here the total energy for the event in +z
1591  sumeneallcluspl = sumeneallcluspl + tecpl[ilayer];
1592  //And for the longitudinal variable
1593  sumldbarpl = sumldbarpl + ldbar[ilayer];
1594  } //end of +z loop
1595 
1596  } //end of loop over layers
1597 
1598  //Per thickness
1599  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1600  if (histograms.h_clusternum_perthick.count(*it)) {
1601  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthplus[std::to_string(*it)]);
1602  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthminus[std::to_string(*it)]);
1603  }
1604  }
1605  //Mixed thickness clusters
1606  histograms.h_mixedhitscluster_zplus[count]->Fill(tnlcpthplus["mixed"]);
1607  histograms.h_mixedhitscluster_zminus[count]->Fill(tnlcpthminus["mixed"]);
1608 
1609  //Total energy clustered from all layer clusters (fraction)
1610  if (caloparteneplus != 0.) {
1611  histograms.h_energyclustered_zplus[count]->Fill(100. * sumeneallcluspl / caloparteneplus);
1612  }
1613  if (caloparteneminus != 0.) {
1614  histograms.h_energyclustered_zminus[count]->Fill(100. * sumeneallclusmi / caloparteneminus);
1615  }
1616 
1617  //For the longitudinal depth barycenter
1618  histograms.h_longdepthbarycentre_zplus[count]->Fill(sumldbarpl / sumeneallcluspl);
1619  histograms.h_longdepthbarycentre_zminus[count]->Fill(sumldbarmi / sumeneallclusmi);
1620 }
1621 
1623  int count,
1624  const std::vector<reco::HGCalMultiCluster>& multiClusters,
1625  std::vector<CaloParticle> const& cP,
1626  std::vector<size_t> const& cPIndices,
1627  std::vector<size_t> const& cPSelectedIndices,
1628  std::map<DetId, const HGCRecHit*> const& hitMap,
1629  unsigned layers) const {
1630  auto nMultiClusters = multiClusters.size();
1631  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
1632  auto nCaloParticles = cPIndices.size();
1633 
1634  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
1635  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>> detIdToMultiClusterId_Map;
1636  std::vector<int> tracksters_fakemerge(nMultiClusters, 0);
1637  std::vector<int> tracksters_duplicate(nMultiClusters, 0);
1638 
1639  // this contains the ids of the caloparticles contributing with at least one hit to the multi cluster and the reconstruction error
1640  //cpsInLayerCluster[multicluster][CPids]
1641  //Connects a multicluster with all related caloparticles.
1642  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInMultiCluster;
1643  cpsInMultiCluster.resize(nMultiClusters);
1644 
1645  //cPOnLayer[caloparticle][layer]
1646  //This defines a "calo particle on layer" concept. It is only filled in case
1647  //that calo particle has a reconstructed hit related via detid. So, a cPOnLayer[i][j] connects a
1648  //specific calo particle i in layer j with:
1649  //1. the sum of all rechits energy times fraction of the relevant simhit in layer j related to that calo particle i.
1650  //2. the hits and fractions of that calo particle i in layer j.
1651  //3. the layer clusters with matched rechit id.
1652  std::unordered_map<int, std::vector<caloParticleOnLayer>> cPOnLayer;
1653  for (unsigned int i = 0; i < nCaloParticles; ++i) {
1654  auto cpIndex = cPIndices[i];
1655  cPOnLayer[cpIndex].resize(layers * 2);
1656  for (unsigned int j = 0; j < layers * 2; ++j) {
1657  cPOnLayer[cpIndex][j].caloParticleId = cpIndex;
1658  cPOnLayer[cpIndex][j].energy = 0.f;
1659  cPOnLayer[cpIndex][j].hits_and_fractions.clear();
1660  }
1661  }
1662 
1663  for (const auto& cpId : cPIndices) {
1664  //take sim clusters
1665  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1666  //loop through sim clusters
1667  for (const auto& it_sc : simClusterRefVector) {
1668  const SimCluster& simCluster = (*(it_sc));
1669  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1670  for (const auto& it_haf : hits_and_fractions) {
1671  DetId hitid = (it_haf.first);
1672  //V9:maps the layers in -z: 0->51 and in +z: 52->103
1673  //V10:maps the layers in -z: 0->49 and in +z: 50->99
1674  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1675  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1676  //Checks whether the current hit belonging to sim cluster has a reconstructed hit.
1677  if (itcheck != hitMap.end()) {
1678  const HGCRecHit* hit = itcheck->second;
1679  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1680  //make a map that will connect a detid with:
1681  //1. the caloparticles that have a simcluster with sim hits in that cell via caloparticle id.
1682  //2. the sum of all simhits fractions that contributes to that detid.
1683  //So, keep in mind that in case of multiple caloparticles contributing in the same cell
1684  //the fraction is the sum over all calo particles. So, something like:
1685  //detid: (caloparticle 1, sum of hits fractions in that detid over all cp) , (caloparticle 2, sum of hits fractions in that detid over all cp), (caloparticle 3, sum of hits fractions in that detid over all cp) ...
1686  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
1687  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
1688  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1689  detIdToCaloParticleId_Map[hitid].emplace_back(
1690  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1691  } else {
1692  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
1693  detIdToCaloParticleId_Map[hitid].end(),
1694  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1695  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
1696  findHitIt->fraction += it_haf.second;
1697  } else {
1698  detIdToCaloParticleId_Map[hitid].emplace_back(
1699  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1700  }
1701  }
1702  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1703  //fill the cPOnLayer[caloparticle][layer] object with energy (sum of all rechits energy times fraction
1704  //of the relevant simhit) and keep the hit (detid and fraction) that contributed.
1705  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
1706  // We need to compress the hits and fractions in order to have a
1707  // reasonable score between CP and LC. Imagine, for example, that a
1708  // CP has detID X used by 2 SimClusters with different fractions. If
1709  // a single LC uses X with fraction 1 and is compared to the 2
1710  // contributions separately, it will be assigned a score != 0, which
1711  // is wrong.
1712  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
1713  auto found = std::find_if(
1714  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
1715  if (found != haf.end()) {
1716  found->second += it_haf.second;
1717  } else {
1718  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
1719  }
1720  }
1721  } // end of loop through simhits
1722  } // end of loop through simclusters
1723  } // end of loop through caloparticles
1724 
1725  //Loop through multiclusters
1726  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1727  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1728 
1729  if (!hits_and_fractions.empty()) {
1730  std::unordered_map<unsigned, float> CPEnergyInMCL;
1731  int maxCPId_byNumberOfHits = -1;
1732  unsigned int maxCPNumberOfHitsInMCL = 0;
1733  int maxCPId_byEnergy = -1;
1734  float maxEnergySharedMCLandCP = 0.f;
1735  float energyFractionOfMCLinCP = 0.f;
1736  float energyFractionOfCPinMCL = 0.f;
1737 
1738  //In case of matched rechit-simhit, so matched
1739  //caloparticle-layercluster-multicluster, it counts and saves the number of
1740  //rechits related to the maximum energy CaloParticle out of all
1741  //CaloParticles related to that layer cluster and multicluster.
1742 
1743  std::unordered_map<unsigned, unsigned> occurrencesCPinMCL;
1744  unsigned int numberOfNoiseHitsInMCL = 0;
1745  unsigned int numberOfHaloHitsInMCL = 0;
1746  unsigned int numberOfHitsInMCL = 0;
1747 
1748  //number of hits related to that cluster.
1749  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1750  numberOfHitsInMCL += numberOfHitsInLC;
1751  std::unordered_map<unsigned, float> CPEnergyInLC;
1752 
1753  //hitsToCaloParticleId is a vector of ints, one for each rechit of the
1754  //layer cluster under study. If negative, there is no simhit from any CaloParticle related.
1755  //If positive, at least one CaloParticle has been found with matched simhit.
1756  //In more detail:
1757  // 1. hitsToCaloParticleId[hitId] = -3
1758  // TN: These represent Halo Cells(N) that have not been
1759  // assigned to any CaloParticle (hence the T).
1760  // 2. hitsToCaloParticleId[hitId] = -2
1761  // FN: These represent Halo Cells(N) that have been assigned
1762  // to a CaloParticle (hence the F, since those should have not been marked as halo)
1763  // 3. hitsToCaloParticleId[hitId] = -1
1764  // FP: These represent Real Cells(P) that have not been
1765  // assigned to any CaloParticle (hence the F, since these are fakes)
1766  // 4. hitsToCaloParticleId[hitId] >= 0
1767  // TP: These represent Real Cells(P) that have been assigned
1768  // to a CaloParticle (hence the T)
1769 
1770  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
1771  //det id of the first hit just to make the lcLayerId variable
1772  //which maps the layers in -z: 0->51 and in +z: 52->103
1773  const auto firstHitDetId = hits_and_fractions[0].first;
1774  int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1775  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1776 
1777  //Loop though the hits of the layer cluster under study
1778  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
1779  DetId rh_detid = hits_and_fractions[hitId].first;
1780  auto rhFraction = hits_and_fractions[hitId].second;
1781 
1782  //Since the hit is belonging to the layer cluster, it must also be in the rechits map.
1783  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1784  const HGCRecHit* hit = itcheck->second;
1785 
1786  //Make a map that will connect a detid (that belongs to a rechit of the layer cluster under study,
1787  //no need to save others) with:
1788  //1. the layer clusters that have rechits in that detid
1789  //2. the fraction of the rechit of each layer cluster that contributes to that detid.
1790  //So, something like:
1791  //detid: (layer cluster 1, hit fraction) , (layer cluster 2, hit fraction), (layer cluster 3, hit fraction) ...
1792  //here comparing with the calo particle map above the
1793  auto hit_find_in_LC = detIdToMultiClusterId_Map.find(rh_detid);
1794  if (hit_find_in_LC == detIdToMultiClusterId_Map.end()) {
1795  detIdToMultiClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>();
1796  }
1797  detIdToMultiClusterId_Map[rh_detid].emplace_back(
1798  HGVHistoProducerAlgo::detIdInfoInMultiCluster{mclId, mclId, rhFraction});
1799 
1800  //Check whether the rechit of the layer cluster under study has a sim hit in the same cell.
1801  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1802 
1803  // if the fraction is zero or the hit does not belong to any calo
1804  // particle, set the caloparticleId for the hit to -1 this will
1805  // contribute to the number of noise hits
1806 
1807  // MR Remove the case in which the fraction is 0, since this could be a
1808  // real hit that has been marked as halo.
1809  if (rhFraction == 0.) {
1810  hitsToCaloParticleId[hitId] = -2;
1811  numberOfHaloHitsInMCL++;
1812  }
1813  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
1814  hitsToCaloParticleId[hitId] -= 1;
1815  } else {
1816  auto maxCPEnergyInLC = 0.f;
1817  auto maxCPId = -1;
1818  for (auto& h : hit_find_in_CP->second) {
1819  auto shared_fraction = std::min(rhFraction, h.fraction);
1820  //We are in the case where there are calo particles with simhits connected via detid with the rechit under study
1821  //So, from all layers clusters, find the rechits that are connected with a calo particle and save/calculate the
1822  //energy of that calo particle as the sum over all rechits of the rechits energy weighted
1823  //by the caloparticle's fraction related to that rechit.
1824  CPEnergyInMCL[h.clusterId] += shared_fraction * hit->energy();
1825  //Same but for layer clusters for the cell association per layer
1826  CPEnergyInLC[h.clusterId] += shared_fraction * hit->energy();
1827  //Here cPOnLayer[caloparticle][layer] describe above is set.
1828  //Here for multi clusters with matched rechit the CP fraction times hit energy is added and saved .
1829  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].first +=
1830  shared_fraction * hit->energy();
1831  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].second = FLT_MAX;
1832  //cpsInMultiCluster[multicluster][CPids]
1833  //Connects a multi cluster with all related caloparticles.
1834  cpsInMultiCluster[mclId].emplace_back(h.clusterId, FLT_MAX);
1835  //From all CaloParticles related to a layer cluster, it saves the id and energy of the calo particle
1836  //that after simhit-rechit matching in layer has the maximum energy.
1837  if (shared_fraction > maxCPEnergyInLC) {
1838  //energy is used only here. cpid is saved for multiclusters
1839  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
1840  maxCPId = h.clusterId;
1841  }
1842  }
1843  //Keep in mind here maxCPId could be zero. So, below ask for negative not including zero to count noise.
1844  hitsToCaloParticleId[hitId] = maxCPId;
1845  }
1846 
1847  } //end of loop through rechits of the layer cluster.
1848 
1849  //Loop through all rechits to count how many of them are noise and how many are matched.
1850  //In case of matched rechit-simhit, it counts and saves the number of rechits related to the maximum energy CaloParticle.
1851  for (auto c : hitsToCaloParticleId) {
1852  if (c < 0) {
1853  numberOfNoiseHitsInMCL++;
1854  } else {
1855  occurrencesCPinMCL[c]++;
1856  }
1857  }
1858 
1859  //Below, from all maximum energy CaloParticles, it saves the one with the largest amount
1860  //of related rechits.
1861  for (auto& c : occurrencesCPinMCL) {
1862  if (c.second > maxCPNumberOfHitsInMCL) {
1863  maxCPId_byNumberOfHits = c.first;
1864  maxCPNumberOfHitsInMCL = c.second;
1865  }
1866  }
1867 
1868  //Find the CaloParticle that has the maximum energy shared with the multicluster under study.
1869  for (auto& c : CPEnergyInMCL) {
1870  if (c.second > maxEnergySharedMCLandCP) {
1871  maxCPId_byEnergy = c.first;
1872  maxEnergySharedMCLandCP = c.second;
1873  }
1874  }
1875  //The energy of the CaloParticle that was found to have the maximum energy shared with the multicluster under study.
1876  float totalCPEnergyFromLayerCP = 0.f;
1877  if (maxCPId_byEnergy >= 0) {
1878  //Loop through all layers
1879  for (unsigned int j = 0; j < layers * 2; ++j) {
1880  totalCPEnergyFromLayerCP = totalCPEnergyFromLayerCP + cPOnLayer[maxCPId_byEnergy][j].energy;
1881  }
1882  energyFractionOfCPinMCL = maxEnergySharedMCLandCP / totalCPEnergyFromLayerCP;
1883  if (multiClusters[mclId].energy() > 0.f) {
1884  energyFractionOfMCLinCP = maxEnergySharedMCLandCP / multiClusters[mclId].energy();
1885  }
1886  }
1887 
1888  LogDebug("HGCalValidator") << std::setw(12) << "multiCluster"
1889  << "\t" //LogDebug("HGCalValidator")
1890  << std::setw(10) << "mulcl energy"
1891  << "\t" << std::setw(5) << "nhits"
1892  << "\t" << std::setw(12) << "noise hits"
1893  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
1894  << "\t" << std::setw(8) << "nhitsCP"
1895  << "\t" << std::setw(16) << "maxCPId_byEnergy"
1896  << "\t" << std::setw(23) << "maxEnergySharedMCLandCP"
1897  << "\t" << std::setw(22) << "totalCPEnergyFromAllLayerCP"
1898  << "\t" << std::setw(22) << "energyFractionOfMCLinCP"
1899  << "\t" << std::setw(25) << "energyFractionOfCPinMCL"
1900  << "\t" << std::endl;
1901  LogDebug("HGCalValidator") << std::setw(12) << mclId << "\t" //LogDebug("HGCalValidator")
1902  << std::setw(10) << multiClusters[mclId].energy() << "\t" << std::setw(5)
1903  << numberOfHitsInMCL << "\t" << std::setw(12) << numberOfNoiseHitsInMCL << "\t"
1904  << std::setw(22) << maxCPId_byNumberOfHits << "\t" << std::setw(8)
1905  << maxCPNumberOfHitsInMCL << "\t" << std::setw(16) << maxCPId_byEnergy << "\t"
1906  << std::setw(23) << maxEnergySharedMCLandCP << "\t" << std::setw(22)
1907  << totalCPEnergyFromLayerCP << "\t" << std::setw(22) << energyFractionOfMCLinCP << "\t"
1908  << std::setw(25) << energyFractionOfCPinMCL << std::endl;
1909 
1910  } //end of loop through multi clusters
1911  }
1912 
1913  //Loop through multiclusters
1914  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1915  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1916 
1917  if (!hits_and_fractions.empty()) {
1918  // find the unique caloparticles id contributing to the multi clusters
1919  //cpsInMultiCluster[multicluster][CPids]
1920  std::sort(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1921  auto last = std::unique(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1922  cpsInMultiCluster[mclId].erase(last, cpsInMultiCluster[mclId].end());
1923 
1924  if (multiClusters[mclId].energy() == 0. && !cpsInMultiCluster[mclId].empty()) {
1925  //Loop through all CaloParticles contributing to multicluster mclId.
1926  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1927  //In case of a multi cluster with zero energy but related CaloParticles the score is set to 1.
1928  cpPair.second = 1.;
1929  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first
1930  << "\t score \t" << cpPair.second << std::endl;
1931  histograms.h_score_multicl2caloparticle[count]->Fill(cpPair.second);
1932  }
1933  continue;
1934  }
1935 
1936  // Compute the correct normalization
1937  float invMultiClusterEnergyWeight = 0.f;
1938  for (auto const& haf : multiClusters[mclId].hitsAndFractions()) {
1939  invMultiClusterEnergyWeight +=
1940  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1941  }
1942  invMultiClusterEnergyWeight = 1.f / invMultiClusterEnergyWeight;
1943 
1944  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1945  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
1946  DetId rh_detid = hits_and_fractions[i].first;
1947  float rhFraction = hits_and_fractions[i].second;
1948  bool hitWithNoCP = false;
1949 
1950  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1951  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
1952  hitWithNoCP = true;
1953  auto itcheck = hitMap.find(rh_detid);
1954  const HGCRecHit* hit = itcheck->second;
1955  float hitEnergyWeight = hit->energy() * hit->energy();
1956 
1957  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1958  float cpFraction = 0.f;
1959  if (!hitWithNoCP) {
1960  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
1961  detIdToCaloParticleId_Map[rh_detid].end(),
1962  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
1963  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end()) {
1964  cpFraction = findHitIt->fraction;
1965  }
1966  }
1967  if (cpPair.second == FLT_MAX) {
1968  cpPair.second = 0.f;
1969  }
1970  cpPair.second +=
1971  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invMultiClusterEnergyWeight;
1972  }
1973  } //end of loop through rechits of layer cluster
1974 
1975  //In case of a multi cluster with some energy but none related CaloParticles print some info.
1976  if (cpsInMultiCluster[mclId].empty())
1977  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\tCP id:\t-1 "
1978  << "\t score \t-1"
1979  << "\n";
1980 
1981  auto score = std::min_element(std::begin(cpsInMultiCluster[mclId]),
1982  std::end(cpsInMultiCluster[mclId]),
1983  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1984  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1985  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first << "\t score \t"
1986  << cpPair.second << std::endl;
1987  if (cpPair.first == score->first) {
1988  histograms.h_score_multicl2caloparticle[count]->Fill(score->second);
1989  }
1990  float sharedeneCPallLayers = 0.;
1991  //Loop through all layers
1992  for (unsigned int j = 0; j < layers * 2; ++j) {
1993  auto const& cp_linked = cPOnLayer[cpPair.first][j].layerClusterIdToEnergyAndScore[mclId];
1994  sharedeneCPallLayers += cp_linked.first;
1995  } //end of loop through layers
1996  LogDebug("HGCalValidator") << "sharedeneCPallLayers " << sharedeneCPallLayers << std::endl;
1997  if (cpPair.first == score->first) {
1998  histograms.h_sharedenergy_multicl2caloparticle[count]->Fill(sharedeneCPallLayers /
1999  multiClusters[mclId].energy());
2000  histograms.h_energy_vs_score_multicl2caloparticle[count]->Fill(
2001  score->second, sharedeneCPallLayers / multiClusters[mclId].energy());
2002  }
2003  }
2004 
2005  auto assocFakeMerge = std::count_if(std::begin(cpsInMultiCluster[mclId]),
2006  std::end(cpsInMultiCluster[mclId]),
2007  [](const auto& obj) { return obj.second < ScoreCutMCLtoCPFakeMerge_; });
2008  tracksters_fakemerge[mclId] = assocFakeMerge;
2009  }
2010  } //end of loop through multiclusters
2011 
2012  std::unordered_map<int, std::vector<float>> score3d;
2013  std::unordered_map<int, std::vector<float>> mclsharedenergy;
2014  std::unordered_map<int, std::vector<float>> mclsharedenergyfrac;
2015 
2016  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2017  auto cpIndex = cPIndices[i];
2018  score3d[cpIndex].resize(nMultiClusters);
2019  mclsharedenergy[cpIndex].resize(nMultiClusters);
2020  mclsharedenergyfrac[cpIndex].resize(nMultiClusters);
2021  for (unsigned int j = 0; j < nMultiClusters; ++j) {
2022  score3d[cpIndex][j] = FLT_MAX;
2023  mclsharedenergy[cpIndex][j] = 0.f;
2024  mclsharedenergyfrac[cpIndex][j] = 0.f;
2025  }
2026  }
2027 
2028  // Here we do fill the plots to compute the different metrics linked to
2029  // gen-level, namely efficiency an duplicate. In this loop we should restrict
2030  // only to the selected caloParaticles.
2031  for (const auto& cpId : cPSelectedIndices) {
2032  //We need to keep the multiclusters ids that are related to
2033  //CaloParticle under study for the final filling of the score.
2034  std::vector<unsigned int> cpId_mclId_related;
2035  cpId_mclId_related.clear();
2036 
2037  float CPenergy = 0.f;
2038  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
2039  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
2040  //Below gives the CP energy related to multicluster per layer.
2041  CPenergy += cPOnLayer[cpId][layerId].energy;
2042  if (CPNumberOfHits == 0)
2043  continue;
2044  int mclWithMaxEnergyInCP = -1;
2045  //This is the maximum energy related to multicluster per layer.
2046  float maxEnergyMCLperlayerinCP = 0.f;
2047  float CPEnergyFractionInMCLperlayer = 0.f;
2048  //Remember and not confused by name. layerClusterIdToEnergyAndScore contains the multicluster id.
2049  for (const auto& mcl : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2050  if (mcl.second.first > maxEnergyMCLperlayerinCP) {
2051  maxEnergyMCLperlayerinCP = mcl.second.first;
2052  mclWithMaxEnergyInCP = mcl.first;
2053  }
2054  }
2055  if (CPenergy > 0.f)
2056  CPEnergyFractionInMCLperlayer = maxEnergyMCLperlayerinCP / CPenergy;
2057 
2058  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
2059  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
2060  << "CPNhitsOnLayer\t" << std::setw(18) << "mclWithMaxEnergyInCP\t" << std::setw(15)
2061  << "maxEnergyMCLinCP\t" << std::setw(20) << "CPEnergyFractionInMCL"
2062  << "\n";
2063  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
2064  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
2065  << CPNumberOfHits << "\t" << std::setw(18) << mclWithMaxEnergyInCP << "\t"
2066  << std::setw(15) << maxEnergyMCLperlayerinCP << "\t" << std::setw(20)
2067  << CPEnergyFractionInMCLperlayer << "\n";
2068 
2069  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
2070  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
2071  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
2072 
2073  bool hitWithNoMCL = false;
2074  if (cpFraction == 0.f)
2075  continue; //hopefully this should never happen
2076  auto hit_find_in_MCL = detIdToMultiClusterId_Map.find(cp_hitDetId);
2077  if (hit_find_in_MCL == detIdToMultiClusterId_Map.end())
2078  hitWithNoMCL = true;
2079  auto itcheck = hitMap.find(cp_hitDetId);
2080  const HGCRecHit* hit = itcheck->second;
2081  float hitEnergyWeight = hit->energy() * hit->energy();
2082  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2083  unsigned int multiClusterId = lcPair.first;
2084  if (std::find(std::begin(cpId_mclId_related), std::end(cpId_mclId_related), multiClusterId) ==
2085  std::end(cpId_mclId_related)) {
2086  cpId_mclId_related.push_back(multiClusterId);
2087  }
2088  float mclFraction = 0.f;
2089 
2090  if (!hitWithNoMCL) {
2091  auto findHitIt = std::find(detIdToMultiClusterId_Map[cp_hitDetId].begin(),
2092  detIdToMultiClusterId_Map[cp_hitDetId].end(),
2093  HGVHistoProducerAlgo::detIdInfoInMultiCluster{multiClusterId, 0, 0.f});
2094  if (findHitIt != detIdToMultiClusterId_Map[cp_hitDetId].end())
2095  mclFraction = findHitIt->fraction;
2096  }
2097  //Observe here that we do not divide as before by the layer cluster energy weight. We should sum first
2098  //over all layers and divide with the total CP energy over all layers.
2099  if (lcPair.second.second == FLT_MAX) {
2100  lcPair.second.second = 0.f;
2101  }
2102  lcPair.second.second += (mclFraction - cpFraction) * (mclFraction - cpFraction) * hitEnergyWeight;
2103  LogDebug("HGCalValidator") << "multiClusterId:\t" << multiClusterId << "\t"
2104  << "mclfraction,cpfraction:\t" << mclFraction << ", " << cpFraction << "\t"
2105  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
2106  << "currect score numerator:\t" << lcPair.second.second << "\n";
2107  }
2108  } //end of loop through sim hits of current calo particle
2109 
2110  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
2111  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id:\t-1 "
2112  << "\t layer \t " << layerId << " Sub score in \t -1"
2113  << "\n";
2114 
2115  for (const auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2116  //3d score here without the denominator at this point
2117  if (score3d[cpId][lcPair.first] == FLT_MAX) {
2118  score3d[cpId][lcPair.first] = 0.f;
2119  }
2120  score3d[cpId][lcPair.first] += lcPair.second.second;
2121  mclsharedenergy[cpId][lcPair.first] += lcPair.second.first;
2122  }
2123  } //end of loop through layers
2124 
2125  // Compute the correct normalization
2126  // We need to loop on the cPOnLayer data structure since this is the
2127  // only one that has the compressed information for multiple usage
2128  // of the same DetId by different SimClusters by a single CaloParticle.
2129  float invCPEnergyWeight = 0.f;
2130  for (const auto& layer : cPOnLayer[cpId]) {
2131  for (const auto& haf : layer.hits_and_fractions) {
2132  invCPEnergyWeight +=
2133  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2134  }
2135  }
2136  invCPEnergyWeight = 1.f / invCPEnergyWeight;
2137 
2138  //Loop through related multiclusters here
2139  //Will switch to vector for access because it is faster
2140  std::vector<int> cpId_mclId_related_vec(cpId_mclId_related.begin(), cpId_mclId_related.end());
2141  for (unsigned int i = 0; i < cpId_mclId_related_vec.size(); ++i) {
2142  auto mclId = cpId_mclId_related_vec[i];
2143  //Now time for the denominator
2144  score3d[cpId][mclId] = score3d[cpId][mclId] * invCPEnergyWeight;
2145  mclsharedenergyfrac[cpId][mclId] = (mclsharedenergy[cpId][mclId] / CPenergy);
2146 
2147  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id: \t" << mclId << "\t score \t" //
2148  << score3d[cpId][mclId] << "\t"
2149  << "invCPEnergyWeight \t" << invCPEnergyWeight << "\t"
2150  << "shared energy:\t" << mclsharedenergy[cpId][mclId] << "\t"
2151  << "shared energy fraction:\t" << mclsharedenergyfrac[cpId][mclId] << "\n";
2152 
2153  histograms.h_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId]);
2154 
2155  histograms.h_sharedenergy_caloparticle2multicl[count]->Fill(mclsharedenergyfrac[cpId][mclId]);
2156  histograms.h_energy_vs_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId],
2157  mclsharedenergyfrac[cpId][mclId]);
2158  } //end of loop through multiclusters
2159 
2160  auto is_assoc = [&](const auto& v) -> bool { return v < ScoreCutCPtoMCLDup_; };
2161 
2162  auto assocDup = std::count_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2163 
2164  if (assocDup > 0) {
2165  histograms.h_num_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2166  histograms.h_num_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2167  auto best = std::min_element(std::begin(score3d[cpId]), std::end(score3d[cpId]));
2168  auto bestmclId = std::distance(std::begin(score3d[cpId]), best);
2169 
2170  histograms.h_sharedenergy_caloparticle2multicl_vs_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta(),
2171  multiClusters[bestmclId].energy() / CPenergy);
2172  histograms.h_sharedenergy_caloparticle2multicl_vs_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi(),
2173  multiClusters[bestmclId].energy() / CPenergy);
2174  }
2175  if (assocDup >= 2) {
2176  auto match = std::find_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2177  while (match != score3d[cpId].end()) {
2178  tracksters_duplicate[std::distance(std::begin(score3d[cpId]), match)] = 1;
2179  match = std::find_if(std::next(match), std::end(score3d[cpId]), is_assoc);
2180  }
2181  }
2182  histograms.h_denom_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2183  histograms.h_denom_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2184 
2185  } //end of loop through caloparticles
2186 
2187  // Here we do fill the plots to compute the different metrics linked to
2188  // reco-level, namely fake-rate an merge-rate. In this loop we should *not*
2189  // restrict only to the selected caloParaticles.
2190  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2191  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
2192  if (!hits_and_fractions.empty()) {
2193  auto assocFakeMerge = tracksters_fakemerge[mclId];
2194  auto assocDuplicate = tracksters_duplicate[mclId];
2195  if (assocDuplicate) {
2196  histograms.h_numDup_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2197  histograms.h_numDup_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2198  }
2199  if (assocFakeMerge > 0) {
2200  histograms.h_num_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2201  histograms.h_num_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2202  auto best = std::min_element(std::begin(cpsInMultiCluster[mclId]),
2203  std::end(cpsInMultiCluster[mclId]),
2204  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2205 
2206  //This is the shared energy taking the best caloparticle in each layer
2207  float sharedeneCPallLayers = 0.;
2208  //Loop through all layers
2209  for (unsigned int j = 0; j < layers * 2; ++j) {
2210  auto const& best_cp_linked = cPOnLayer[best->first][j].layerClusterIdToEnergyAndScore[mclId];
2211  sharedeneCPallLayers += best_cp_linked.first;
2212  } //end of loop through layers
2213  histograms.h_sharedenergy_multicl2caloparticle_vs_eta[count]->Fill(
2214  multiClusters[mclId].eta(), sharedeneCPallLayers / multiClusters[mclId].energy());
2215  histograms.h_sharedenergy_multicl2caloparticle_vs_phi[count]->Fill(
2216  multiClusters[mclId].phi(), sharedeneCPallLayers / multiClusters[mclId].energy());
2217  }
2218  if (assocFakeMerge >= 2) {
2219  histograms.h_numMerge_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2220  histograms.h_numMerge_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2221  }
2222  histograms.h_denom_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2223  histograms.h_denom_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2224  }
2225  }
2226 }
2227 
// Fills the per-event multicluster (trackster) histograms: counts of
// multiclusters per endcap, contiguous-layer classification, layer-cluster
// multiplicity maps, and basic kinematics, then delegates the
// multicluster-to-CaloParticle association plots to
// multiClusters_to_CaloParticles() at the end.
// NOTE(review): the opening line of this definition (original line 2228,
// carrying the return type, function name and first 'histograms' parameter)
// was dropped by the documentation generator — confirm against the real
// source; the remaining parameters are listed below.
2229  int count,
2230  const std::vector<reco::HGCalMultiCluster>& multiClusters,
2231  std::vector<CaloParticle> const& cP,
2232  std::vector<size_t> const& cPIndices,
2233  std::vector<size_t> const& cPSelectedIndices,
2234  std::map<DetId, const HGCRecHit*> const& hitMap,
2235  unsigned layers) const {
2236  //Each event to be treated as two events:
2237  //an event in +ve endcap, plus another event in -ve endcap.
2238 
2239  //To keep track of total num of multiclusters
2240  int tnmclmz = 0; //-z
2241  int tnmclpz = 0; //+z
2242  //To count the number of multiclusters with 3 contiguous layers per event.
2243  int tncontmclpz = 0; //+z
2244  int tncontmclmz = 0; //-z
2245  //For the number of multiclusters without 3 contiguous layers per event.
2246  int tnnoncontmclpz = 0; //+z
2247  int tnnoncontmclmz = 0; //-z
2248  //We want to check below the score of cont and non cont multiclusters
2249  std::vector<bool> contmulti;
2250  contmulti.clear();
2251 
2252  //[mclId]-> vector of 2d layer clusters size
2253  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity;
2254  //[mclId]-> [layer][cluster size]
2255  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity_vs_layer;
2256  //We will need for the scale text option
2257  // unsigned int totallcinmcls = 0;
2258  // for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2259  // totallcinmcls = totallcinmcls + multiClusters[mclId].clusters().size();
2260  // }
2261 
2262  auto nMultiClusters = multiClusters.size();
2263  //loop through multiclusters of the event
2264  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2265  const auto layerClusters = multiClusters[mclId].clusters();
2266  auto nLayerClusters = layerClusters.size();
2267 
// Multiclusters without any layer clusters contribute nothing.
2268  if (nLayerClusters == 0)
2269  continue;
2270 
2271  if (multiClusters[mclId].z() < 0.) {
2272  tnmclmz++;
2273  }
2274  if (multiClusters[mclId].z() > 0.) {
2275  tnmclpz++;
2276  }
2277 
2278  //Total number of layer clusters in multicluster
2279  int tnlcinmcl = 0;
2280 
2281  //To keep track of total num of layer clusters per multicluster
2282  //tnlcinmclperlaypz[layerid], tnlcinmclperlaymz[layerid]
// NOTE(review): fixed size 1000 is assumed to be >= layers*2 for every
// geometry; confirm this bound still holds for future detector versions.
2283  std::vector<int> tnlcinmclperlay(1000, 0); //+z
2284 
2285  //For the layers the multicluster expands to. Will use a set because there would be many
2286  //duplicates and then go back to vector for random access, since they say it is faster.
2287  std::set<int> multicluster_layers;
2288 
2289  bool multiclusterInZplus = false;
2290  bool multiclusterInZminus = false;
2291 
2292  //Loop through layer clusters
2293  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
2294  //take the hits and their fraction of the specific layer cluster.
2295  const std::vector<std::pair<DetId, float>>& hits_and_fractions = layerClusters[lcId]->hitsAndFractions();
2296 
2297  //For the multiplicity of the 2d layer clusters in multiclusters
2298  multiplicity[mclId].emplace_back(hits_and_fractions.size());
2299 
// NOTE(review): indexing [0] assumes every layer cluster has at least one
// hit — confirm that guarantee holds upstream (empty clusters would be UB here).
2300  const auto firstHitDetId = hits_and_fractions[0].first;
2301  //The layer that the layer cluster belongs to
// Folds the zside sign into the layer index: -z clusters land in
// [0, layers), +z clusters in [layers, 2*layers) via the (zside+1)>>1 term.
2302  int layerid = recHitTools_->getLayerWithOffset(firstHitDetId) +
2303  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2304  multicluster_layers.insert(layerid);
2305  multiplicity_vs_layer[mclId].emplace_back(layerid);
2306 
2307  tnlcinmclperlay[layerid]++;
2308  tnlcinmcl++;
2309 
2310  if (recHitTools_->zside(firstHitDetId) > 0.) {
2311  multiclusterInZplus = true;
2312  }
2313  if (recHitTools_->zside(firstHitDetId) < 0.) {
2314  multiclusterInZminus = true;
2315  }
2316 
2317  } //end of loop through layerclusters
2318 
2319  //Per layer : Loop 0->99
2320  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2321  if (histograms.h_clusternum_in_multicluster_perlayer[count].count(ilayer) && tnlcinmclperlay[ilayer] != 0) {
2322  histograms.h_clusternum_in_multicluster_perlayer[count].at(ilayer)->Fill((float)tnlcinmclperlay[ilayer]);
2323  }
2324  //For the profile now of 2d layer cluster in multiclusters vs layer number.
2325  if (tnlcinmclperlay[ilayer] != 0) {
2326  histograms.h_clusternum_in_multicluster_vs_layer[count]->Fill((float)ilayer, (float)tnlcinmclperlay[ilayer]);
2327  }
2328  } //end of loop over layers
2329 
2330  //Looking for multiclusters with 3 contiguous layers per event.
2331  std::vector<int> multicluster_layers_vec(multicluster_layers.begin(), multicluster_layers.end());
2332  //Since we want to also check for non contiguous multiclusters
2333  bool contimulti = false;
2334  //Observe that we start from 1 and go up to size - 1 element.
2335  if (multicluster_layers_vec.size() >= 3) {
2336  for (unsigned int i = 1; i < multicluster_layers_vec.size() - 1; ++i) {
// The set iteration above produced a sorted, duplicate-free layer list, so a
// triplet of consecutive values here means 3 contiguous detector layers.
2337  if ((multicluster_layers_vec[i - 1] + 1 == multicluster_layers_vec[i]) &&
2338  (multicluster_layers_vec[i + 1] - 1 == multicluster_layers_vec[i])) {
2339  //So, this is a multicluster with 3 contiguous layers per event
2340  if (multiclusterInZplus) {
2341  tncontmclpz++;
2342  }
2343  if (multiclusterInZminus) {
2344  tncontmclmz++;
2345  }
2346  contimulti = true;
2347  break;
2348  }
2349  }
2350  }
2351  //Count non contiguous multiclusters
2352  if (!contimulti) {
2353  if (multiclusterInZplus) {
2354  tnnoncontmclpz++;
2355  }
2356  if (multiclusterInZminus) {
2357  tnnoncontmclmz++;
2358  }
2359  }
2360 
2361  //Save for the score
// NOTE(review): contmulti is filled but never read within this function —
// either dead state or consumed elsewhere; verify before removing.
2362  contmulti.push_back(contimulti);
2363 
2364  histograms.h_clusternum_in_multicluster[count]->Fill(tnlcinmcl);
2365 
2366  for (unsigned int lc = 0; lc < multiplicity[mclId].size(); ++lc) {
2367  //multiplicity of the current LC
// How many layer clusters in this multicluster have the same size as the
// current one (O(n) count per element, O(n^2) per multicluster overall).
2368  float mlp = std::count(std::begin(multiplicity[mclId]), std::end(multiplicity[mclId]), multiplicity[mclId][lc]);
2369  //LogDebug("HGCalValidator") << "mlp %" << (100. * mlp)/ ((float) nLayerClusters) << std::endl;
2370  // histograms.h_multiplicityOfLCinMCL[count]->Fill( mlp , multiplicity[mclId][lc] , 100. / (float) totallcinmcls );
2371  histograms.h_multiplicityOfLCinMCL[count]->Fill(mlp, multiplicity[mclId][lc]);
2372  //When we will plot with the text option we want the entries to be the same
2373  //as the % of the current cell over the whole number of clusters. For this we need an extra histo.
2374  histograms.h_multiplicity_numberOfEventsHistogram[count]->Fill(mlp);
2375  //For the cluster multiplicity vs layer
2376  //First with the -z endcap (V10:0->49)
2377  if (multiplicity_vs_layer[mclId][lc] < layers) {
2378  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus[count]->Fill(mlp, multiplicity_vs_layer[mclId][lc]);
2379  histograms.h_multiplicity_zminus_numberOfEventsHistogram[count]->Fill(mlp);
2380  } else { //Then for the +z (V10:50->99)
2381  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus[count]->Fill(
2382  mlp, multiplicity_vs_layer[mclId][lc] - layers);
2383  histograms.h_multiplicity_zplus_numberOfEventsHistogram[count]->Fill(mlp);
2384  }
2385  //For the cluster multiplicity vs cluster energy
2386  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy[count]->Fill(mlp, layerClusters[lc]->energy());
2387  }
2388 
2389  if (!multicluster_layers.empty()) {
2390  histograms.h_multicluster_pt[count]->Fill(multiClusters[mclId].pt());
2391  histograms.h_multicluster_eta[count]->Fill(multiClusters[mclId].eta());
2392  histograms.h_multicluster_phi[count]->Fill(multiClusters[mclId].phi());
2393  histograms.h_multicluster_energy[count]->Fill(multiClusters[mclId].energy());
2394  histograms.h_multicluster_x[count]->Fill(multiClusters[mclId].x());
2395  histograms.h_multicluster_y[count]->Fill(multiClusters[mclId].y());
2396  histograms.h_multicluster_z[count]->Fill(multiClusters[mclId].z());
// std::set keeps keys ordered, so begin()/rbegin() are the first/last layers.
2397  histograms.h_multicluster_firstlayer[count]->Fill((float)*multicluster_layers.begin());
2398  histograms.h_multicluster_lastlayer[count]->Fill((float)*multicluster_layers.rbegin());
2399  histograms.h_multicluster_layersnum[count]->Fill((float)multicluster_layers.size());
2400  }
2401  } //end of loop through multiclusters
2402 
2403  histograms.h_multiclusternum[count]->Fill(tnmclmz + tnmclpz);
2404  histograms.h_contmulticlusternum[count]->Fill(tncontmclpz + tncontmclmz);
2405  histograms.h_noncontmulticlusternum[count]->Fill(tnnoncontmclpz + tnnoncontmclmz);
2406 
// Delegate the association (score/shared-energy/efficiency/fake) plots.
2407  multiClusters_to_CaloParticles(histograms, count, multiClusters, cP, cPIndices, cPSelectedIndices, hitMap, layers);
2408 }
2409 
// Squared Euclidean distance between (x1,y1) and (x2,y2) in the transverse
// plane of a layer. Kept separate from distance() so callers that only
// compare distances can avoid the sqrt.
// NOTE(review): the first signature line (original line 2410, with the
// function name and the x1 parameter) was elided by the listing generator.
2411  const double y1,
2412  const double x2,
2413  const double y2) const { //distance squared
2414  const double dx = x1 - x2;
2415  const double dy = y1 - y2;
2416  return (dx * dx + dy * dy);
2417 } //distance squared
// 2-D Euclidean distance on a layer (x-y plane); thin sqrt wrapper over
// distance2().
// NOTE(review): the first signature line (original line 2418, with the
// function name and the x1 parameter) was elided by the listing generator.
2419  const double y1,
2420  const double x2,
2421  const double y2) const { //2-d distance on the layer (x-y)
2422  return std::sqrt(distance2(x1, y1, x2, y2));
2423 }
2424 
// Stores the shared RecHitTools helper used throughout this producer for
// geometry/DetId lookups (getLayerWithOffset, zside). Ownership is shared
// with the caller via the shared_ptr.
// NOTE(review): the by-value parameter is copied again into the member;
// 'recHitTools_ = std::move(recHitTools);' would save one refcount bump.
2425 void HGVHistoProducerAlgo::setRecHitTools(std::shared_ptr<hgcal::RecHitTools> recHitTools) {
2426  recHitTools_ = recHitTools;
2427 }
2428 
// Returns the DetId of the highest-energy rechit belonging to the given
// cluster, looking each hit up in hitMap.
// NOTE(review): the first signature line (original line 2429, with the return
// type, function name and 'cluster' parameter) was elided by the listing
// generator.
2430  std::map<DetId, const HGCRecHit*> const& hitMap) const {
// Default-constructed DetId is returned if the cluster has no hits or none
// exceeds energy 0.
2431  DetId themaxid;
2432  const std::vector<std::pair<DetId, float>>& hits_and_fractions = cluster.hitsAndFractions();
2433 
2434  double maxene = 0.;
2435  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
2436  it_haf != hits_and_fractions.end();
2437  ++it_haf) {
2438  DetId rh_detid = it_haf->first;
2439 
// NOTE(review): the find() result is dereferenced without checking against
// end(); a cluster hit missing from hitMap would be undefined behavior.
// Presumably every cluster hit is guaranteed to be in hitMap — confirm.
2440  std::map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2441  const HGCRecHit* hit = itcheck->second;
2442 
2443  if (maxene < hit->energy()) {
2444  maxene = hit->energy();
2445  themaxid = rh_detid;
2446  }
2447  }
2448 
2449  return themaxid;
2450 }
2451 
// Returns |eta| when the histograms were configured with useFabsEta (folding
// the +z and -z endcaps into the same bins), otherwise returns eta unchanged.
2452 double HGVHistoProducerAlgo::getEta(double eta) const {
2453  if (useFabsEta_)
2454  return fabs(eta);
2455  else
2456  return eta;
2457 }
HGVHistoProducerAlgo::nintLongDepBary_
int nintLongDepBary_
Definition: HGVHistoProducerAlgo.h:252
HGVHistoProducerAlgo::maxSizeCLsinMCLs_
double maxSizeCLsinMCLs_
Definition: HGVHistoProducerAlgo.h:291
HGVHistoProducerAlgo::multiClusters_to_CaloParticles
void multiClusters_to_CaloParticles(const Histograms &histograms, int count, const std::vector< reco::HGCalMultiCluster > &multiClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:1622
HGVHistoProducerAlgo::maxEta_
double maxEta_
Definition: HGVHistoProducerAlgo.h:238
DDAxes::y
HGVHistoProducerAlgo::bookInfo
void bookInfo(DQMStore::IBooker &ibook, Histograms &histograms)
Definition: HGVHistoProducerAlgo.cc:186
HGVHistoProducerAlgo::maxY_
double maxY_
Definition: HGVHistoProducerAlgo.h:297
electrons_cff.bool
bool
Definition: electrons_cff.py:372
HGVHistoProducerAlgo::nintTotNcellsperthickperlayer_
int nintTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:268
mps_fire.i
i
Definition: mps_fire.py:355
HGVHistoProducerAlgo::bookMultiClusterHistos
void bookMultiClusterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned layers)
Definition: HGVHistoProducerAlgo.cc:535
HGVHistoProducerAlgo::maxClEnepermultiplicity_
double maxClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:293
HGVHistoProducerAlgo::nintDisSeedToMaxperthickperlayer_
int nintDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:278
MessageLogger.h
HGVHistoProducerAlgo::minMixedHitsCluster_
double minMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:247
HGVHistoProducerAlgo::useFabsEta_
bool useFabsEta_
Definition: HGVHistoProducerAlgo.h:240
HGVHistoProducerAlgo::minZ_
double minZ_
Definition: HGVHistoProducerAlgo.h:299
HGVHistoProducerAlgo::fill_caloparticle_histos
void fill_caloparticle_histos(const Histograms &histograms, int pdgid, const CaloParticle &caloparticle, std::vector< SimVertex > const &simVertices) const
Definition: HGVHistoProducerAlgo.cc:780
CaloParticle::eta
float eta() const
Momentum pseudorapidity. Note this is taken from the simtrack before the calorimeter.
Definition: CaloParticle.h:142
HGVHistoProducerAlgo::maxMixedHitsCluster_
double maxMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:247
HGVHistoProducerAlgo::nintClEnepermultiplicity_
int nintClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:294
HGVHistoProducerAlgo::nintPhi_
int nintPhi_
Definition: HGVHistoProducerAlgo.h:246
HGVHistoProducerAlgo::maxEneClperlay_
double maxEneClperlay_
Definition: HGVHistoProducerAlgo.h:257
f
double f[11][100]
Definition: MuScleFitUtils.cc:78
HGVHistoProducerAlgo::nintZpos_
int nintZpos_
Definition: HGVHistoProducerAlgo.h:254
HGVHistoProducerAlgo::minDisToMaxperthickperlayer_
double minDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:273
DiDispStaMuonMonitor_cfi.pt
pt
Definition: DiDispStaMuonMonitor_cfi.py:39
HGVHistoProducerAlgo::layerClusters_to_CaloParticles
void layerClusters_to_CaloParticles(const Histograms &histograms, const reco::CaloClusterCollection &clusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:811
min
T min(T a, T b)
Definition: MathUtil.h:58
ecaldqm::zside
int zside(DetId const &)
Definition: EcalDQMCommonUtils.cc:189
testProducerWithPsetDescEmpty_cfi.x2
x2
Definition: testProducerWithPsetDescEmpty_cfi.py:28
HGVHistoProducerAlgo::minX_
double minX_
Definition: HGVHistoProducerAlgo.h:295
DetId::det
constexpr Detector det() const
get the detector field from this detid
Definition: DetId.h:46
HGVHistoProducerAlgo::minTotNClsinMCLs_
double minTotNClsinMCLs_
Definition: HGVHistoProducerAlgo.h:285
HGVHistoProducerAlgo::~HGVHistoProducerAlgo
~HGVHistoProducerAlgo()
Definition: HGVHistoProducerAlgo.cc:184
HLT_2018_cff.distance
distance
Definition: HLT_2018_cff.py:6417
HGVHistoProducerAlgo::maxTotNClsperthick_
double maxTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:265
HGVHistoProducerAlgo::nintEne_
int nintEne_
Definition: HGVHistoProducerAlgo.h:242
HGVHistoProducerAlgo::HGVHistoProducerAlgo
HGVHistoProducerAlgo(const edm::ParameterSet &pset)
Definition: HGVHistoProducerAlgo.cc:20
CaloParticle::g4Tracks
const std::vector< SimTrack > & g4Tracks() const
Definition: CaloParticle.h:74
CaloParticle::energy
float energy() const
Energy. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:98
DDAxes::x
edm::RefVector< SimClusterCollection >
SimCluster
Monte Carlo truth information used for tracking validation.
Definition: SimCluster.h:29
findQualityFiles.v
v
Definition: findQualityFiles.py:179
ScoreCutLCtoCP_
const double ScoreCutLCtoCP_
Definition: HGVHistoProducerAlgo.cc:15
HGVHistoProducerAlgo::minEneClperlay_
double minEneClperlay_
Definition: HGVHistoProducerAlgo.h:257
spr::find
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
newFWLiteAna.found
found
Definition: newFWLiteAna.py:118
HGVHistoProducerAlgo::fill_generic_cluster_histos
void fill_generic_cluster_histos(const Histograms &histograms, int count, const reco::CaloClusterCollection &clusters, const Density &densities, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::map< DetId, const HGCRecHit * > const &, std::map< double, double > cummatbudg, unsigned layers, std::vector< int > thicknesses) const
Definition: HGVHistoProducerAlgo.cc:1309
HGVHistoProducerAlgo::findmaxhit
DetId findmaxhit(const reco::CaloCluster &cluster, std::map< DetId, const HGCRecHit * > const &) const
Definition: HGVHistoProducerAlgo.cc:2429
HGVHistoProducerAlgo::minSizeCLsinMCLs_
double minSizeCLsinMCLs_
Definition: HGVHistoProducerAlgo.h:291
end
#define end
Definition: vmac.h:39
HGVHistoProducerAlgo::nintDisToMaxperthickperlayerenewei_
int nintDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:276
trackingPlots.assoc
assoc
Definition: trackingPlots.py:183
HGVHistoProducerAlgo::nintEneCl_
int nintEneCl_
Definition: HGVHistoProducerAlgo.h:250
ScoreCutMCLtoCPFakeMerge_
const double ScoreCutMCLtoCPFakeMerge_
Definition: HGVHistoProducerAlgo.cc:17
HGVHistoProducerAlgo::maxDisToMaxperthickperlayerenewei_
double maxDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:275
HGVHistoProducerAlgo::maxEneCl_
double maxEneCl_
Definition: HGVHistoProducerAlgo.h:249
DetId
Definition: DetId.h:17
HGVHistoProducerAlgo::setRecHitTools
void setRecHitTools(std::shared_ptr< hgcal::RecHitTools > recHitTools)
Definition: HGVHistoProducerAlgo.cc:2425
HGVHistoProducerAlgo::minLongDepBary_
double minLongDepBary_
Definition: HGVHistoProducerAlgo.h:251
HGVHistoProducerAlgo::maxZpos_
double maxZpos_
Definition: HGVHistoProducerAlgo.h:253
DetId::HGCalHSi
Definition: DetId.h:33
DetId::HGCalEE
Definition: DetId.h:32
testProducerWithPsetDescEmpty_cfi.x1
x1
Definition: testProducerWithPsetDescEmpty_cfi.py:33
dqmdumpme.last
last
Definition: dqmdumpme.py:56
testProducerWithPsetDescEmpty_cfi.y1
y1
Definition: testProducerWithPsetDescEmpty_cfi.py:29
HGVHistoProducerAlgo::nintTotNMCLs_
int nintTotNMCLs_
Definition: HGVHistoProducerAlgo.h:284
HGVHistoProducerAlgo::nintTotNClsinMCLs_
int nintTotNClsinMCLs_
Definition: HGVHistoProducerAlgo.h:286
HGVHistoProducerAlgo.h
SimCluster.h
h
HGVHistoProducerAlgo::maxMplofLCs_
double maxMplofLCs_
Definition: HGVHistoProducerAlgo.h:289
HGVHistoProducerAlgo::nintDisToSeedperthickperlayer_
int nintDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:270
HGVHistoProducerAlgo::minEneCl_
double minEneCl_
Definition: HGVHistoProducerAlgo.h:249
Calorimetry_cff.thickness
thickness
Definition: Calorimetry_cff.py:114
PVValHelper::eta
Definition: PVValidationHelpers.h:69
HGVHistoProducerAlgo::maxLongDepBary_
double maxLongDepBary_
Definition: HGVHistoProducerAlgo.h:251
reco::CaloCluster
Definition: CaloCluster.h:31
mathSSE::sqrt
T sqrt(T t)
Definition: SSEVec.h:19
reco::CaloClusterCollection
std::vector< CaloCluster > CaloClusterCollection
collection of CaloCluster objects
Definition: CaloClusterFwd.h:19
ScoreCutCPtoMCLDup_
const double ScoreCutCPtoMCLDup_
Definition: HGVHistoProducerAlgo.cc:18
DDAxes::z
HGVHistoProducerAlgo::nintMplofLCs_
int nintMplofLCs_
Definition: HGVHistoProducerAlgo.h:290
dqm::implementation::IBooker::bookProfile
MonitorElement * bookProfile(TString const &name, TString const &title, int nchX, double lowX, double highX, int, double lowY, double highY, char const *option="s", FUNC onbooking=NOOP())
Definition: DQMStore.h:322
HGVHistoProducerAlgo::maxDisToSeedperthickperlayerenewei_
double maxDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:271
CaloParticle::phi
float phi() const
Momentum azimuthal angle. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:134
HCALHighEnergyHPDFilter_cfi.energy
energy
Definition: HCALHighEnergyHPDFilter_cfi.py:5
HGVHistoProducerAlgo::bookClusterHistos
void bookClusterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned layers, std::vector< int > thicknesses, std::string pathtomatbudfile)
Definition: HGVHistoProducerAlgo.cc:208
HGVHistoProducerAlgo::getEta
double getEta(double eta) const
Definition: HGVHistoProducerAlgo.cc:2452
HGVHistoProducerAlgo::minClEneperthickperlayer_
double minClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:279
HGVHistoProducerAlgo::minTotNClsinMCLsperlayer_
double minTotNClsinMCLsperlayer_
Definition: HGVHistoProducerAlgo.h:287
HGVHistoProducerAlgo::nintSizeCLsinMCLs_
int nintSizeCLsinMCLs_
Definition: HGVHistoProducerAlgo.h:292
HGVHistoProducerAlgo::minClEnepermultiplicity_
double minClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:293
hltEgammaHGCALIDVarsL1Seeded_cfi.layerClusters
layerClusters
Definition: hltEgammaHGCALIDVarsL1Seeded_cfi.py:5
HGVHistoProducerAlgo::maxTotNClsinMCLsperlayer_
double maxTotNClsinMCLsperlayer_
Definition: HGVHistoProducerAlgo.h:287
HGVHistoProducerAlgo::nintZ_
int nintZ_
Definition: HGVHistoProducerAlgo.h:300
HGVHistoProducerAlgo::nintPt_
int nintPt_
Definition: HGVHistoProducerAlgo.h:244
HGVHistoProducerAlgo::nintTotNClsperlay_
int nintTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:256
HGVHistoProducerAlgo::nintSharedEneFrac_
int nintSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:262
HGVHistoProducerAlgo::maxPt_
double maxPt_
Definition: HGVHistoProducerAlgo.h:243
HGVHistoProducerAlgo::minSharedEneFrac_
double minSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:261
getGTfromDQMFile.obj
obj
Definition: getGTfromDQMFile.py:32
HGVHistoProducerAlgo::nintScore_
int nintScore_
Definition: HGVHistoProducerAlgo.h:260
HGVHistoProducerAlgo::fill_cluster_histos
void fill_cluster_histos(const Histograms &histograms, int count, const reco::CaloCluster &cluster) const
Definition: HGVHistoProducerAlgo.cc:804
HGVHistoProducerAlgo::maxDisToSeedperthickperlayer_
double maxDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:269
HGVHistoProducerAlgo::recHitTools_
std::shared_ptr< hgcal::RecHitTools > recHitTools_
Definition: HGVHistoProducerAlgo.h:235
AlCaHLTBitMon_QueryRunRegistry.string
string
Definition: AlCaHLTBitMon_QueryRunRegistry.py:256
HGVHistoProducerAlgo::minDisToSeedperthickperlayer_
double minDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:269
HGCRecHit
Definition: HGCRecHit.h:14
HGVHistoProducerAlgo::distance
double distance(const double x1, const double y1, const double x2, const double y2) const
Definition: HGVHistoProducerAlgo.cc:2418
HGVHistoProducerAlgo::nintCellsEneDensperthick_
int nintCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:282
HGVHistoProducerAlgo::minPhi_
double minPhi_
Definition: HGVHistoProducerAlgo.h:245
bsc_activity_cfg.clusters
clusters
Definition: bsc_activity_cfg.py:36
testProducerWithPsetDescEmpty_cfi.y2
y2
Definition: testProducerWithPsetDescEmpty_cfi.py:30
LogDebug
#define LogDebug(id)
Definition: MessageLogger.h:670
edm::ParameterSet
Definition: ParameterSet.h:36
HGVHistoProducerAlgo::maxClEneperthickperlayer_
double maxClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:279
CaloParticle
Definition: CaloParticle.h:16
reco::CaloCluster::eta
double eta() const
pseudorapidity of cluster centroid
Definition: CaloCluster.h:181
reco::CaloCluster::hitsAndFractions
const std::vector< std::pair< DetId, float > > & hitsAndFractions() const
Definition: CaloCluster.h:210
KineDebug3::count
void count()
Definition: KinematicConstrainedVertexUpdatorT.h:21
HGVHistoProducerAlgo::nintEneClperlay_
int nintEneClperlay_
Definition: HGVHistoProducerAlgo.h:258
HGVHistoProducerAlgo::maxMCLSharedEneFrac_
double maxMCLSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:263
HGVHistoProducerAlgo::detIdInfoInMultiCluster
Definition: HGVHistoProducerAlgo.h:218
match
std::pair< typename Association::data_type::first_type, double > match(Reference key, Association association, bool bestMatchByMaxValue)
Generic matching function.
Definition: Utils.h:10
HGVHistoProducerAlgo::maxZ_
double maxZ_
Definition: HGVHistoProducerAlgo.h:299
ScoreCutCPtoLC_
const double ScoreCutCPtoLC_
Definition: HGVHistoProducerAlgo.cc:16
HGVHistoProducerAlgo::nintTotNClsinMCLsperlayer_
int nintTotNClsinMCLsperlayer_
Definition: HGVHistoProducerAlgo.h:288
position
static int position[264][3]
Definition: ReadPGInfo.cc:289
dqm::implementation::IBooker::bookInt
MonitorElement * bookInt(TString const &name, FUNC onbooking=NOOP())
Definition: DQMStore.h:73
HGVHistoProducerAlgo::nintClEneperthickperlayer_
int nintClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:280
HGVHistoProducerAlgo::maxSharedEneFrac_
double maxSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:261
HGVHistoProducerAlgo::detIdInfoInCluster
Definition: HGVHistoProducerAlgo.h:212
createfilelist.int
int
Definition: createfilelist.py:10
HGVHistoProducerAlgo::minEta_
double minEta_
Definition: HGVHistoProducerAlgo.h:238
HGVHistoProducerAlgo::maxPhi_
double maxPhi_
Definition: HGVHistoProducerAlgo.h:245
HGVHistoProducerAlgo::maxX_
double maxX_
Definition: HGVHistoProducerAlgo.h:295
PVValHelper::dy
Definition: PVValidationHelpers.h:49
histograms
Definition: histograms.py:1
HltBtagPostValidation_cff.c
c
Definition: HltBtagPostValidation_cff.py:31
HGVHistoProducerAlgo::minPt_
double minPt_
Definition: HGVHistoProducerAlgo.h:243
HGVHistoProducerAlgo::nintDisToMaxperthickperlayer_
int nintDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:274
cms::cuda::for
for(int i=first, nt=offsets[nh];i< nt;i+=gridDim.x *blockDim.x)
Definition: HistoContainer.h:27
CaloParticle::pt
float pt() const
Transverse momentum. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:130
HGVHistoProducerAlgo::nintTotNClsperthick_
int nintTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:266
HGVHistoProducerAlgo::nintMixedHitsCluster_
int nintMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:248
HGVHistoProducerAlgo::maxTotNClsperlay_
double maxTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:255
HGVHistoProducerAlgo::minY_
double minY_
Definition: HGVHistoProducerAlgo.h:297
DDAxes::phi
HGVHistoProducerAlgo::maxTotNcellsperthickperlayer_
double maxTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:267
HGCalDetId
Definition: HGCalDetId.h:8
HGVHistoProducerAlgo::bookCaloParticleHistos
void bookCaloParticleHistos(DQMStore::IBooker &ibook, Histograms &histograms, int pdgid)
Definition: HGVHistoProducerAlgo.cc:195
eostools.move
def move(src, dest)
Definition: eostools.py:511
std
Definition: JetResolutionObject.h:76
DetId::rawId
constexpr uint32_t rawId() const
get the raw id
Definition: DetId.h:57
HGVHistoProducerAlgo::maxDisSeedToMaxperthickperlayer_
double maxDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:277
HGVHistoProducerAlgo::nintEta_
int nintEta_
Definition: HGVHistoProducerAlgo.h:239
HGVHistoProducerAlgo::maxCellsEneDensperthick_
double maxCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:281
tier0.unique
def unique(seq, keepstr=True)
Definition: tier0.py:24
HGVHistoProducerAlgo::maxTotNMCLs_
double maxTotNMCLs_
Definition: HGVHistoProducerAlgo.h:283
HGVHistoProducerAlgo::nintY_
int nintY_
Definition: HGVHistoProducerAlgo.h:298
DetId::HGCalHSc
Definition: DetId.h:34
HGVHistoProducerAlgo::minTotNClsperlay_
double minTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:255
HGVHistoProducerAlgo::minDisToMaxperthickperlayerenewei_
double minDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:275
dqm::implementation::IBooker::book2D
MonitorElement * book2D(TString const &name, TString const &title, int nchX, double lowX, double highX, int nchY, double lowY, double highY, FUNC onbooking=NOOP())
Definition: DQMStore.h:177
relativeConstraints.empty
bool empty
Definition: relativeConstraints.py:46
HGVHistoProducerAlgo::minCellsEneDensperthick_
double minCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:281
HGVHistoProducerAlgo::minZpos_
double minZpos_
Definition: HGVHistoProducerAlgo.h:253
HGVHistoProducerAlgo::minEne_
double minEne_
Definition: HGVHistoProducerAlgo.h:241
HGVHistoProducerAlgo::minTotNcellsperthickperlayer_
double minTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:267
dqm::implementation::IBooker
Definition: DQMStore.h:43
HGVHistoProducerAlgo::fill_multi_cluster_histos
void fill_multi_cluster_histos(const Histograms &histograms, int count, const std::vector< reco::HGCalMultiCluster > &multiClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::map< DetId, const HGCRecHit * > const &, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:2228
HGVHistoProducerAlgo::distance2
double distance2(const double x1, const double y1, const double x2, const double y2) const
Definition: HGVHistoProducerAlgo.cc:2410
SimCluster::hits_and_fractions
std::vector< std::pair< uint32_t, float > > hits_and_fractions() const
Returns list of rechit IDs and fractions for this SimCluster.
Definition: SimCluster.h:184
HGVHistoProducerAlgo::maxDisToMaxperthickperlayer_
double maxDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:273
HGVHistoProducerAlgo::maxEne_
double maxEne_
Definition: HGVHistoProducerAlgo.h:241
HGVHistoProducerAlgo::maxScore_
double maxScore_
Definition: HGVHistoProducerAlgo.h:259
HGCalValidator_cfi.simVertices
simVertices
Definition: HGCalValidator_cfi.py:43
offlineSlimmedPrimaryVertices_cfi.score
score
Definition: offlineSlimmedPrimaryVertices_cfi.py:6
HGVHistoProducerAlgo::nintDisToSeedperthickperlayerenewei_
int nintDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:272
DetId::Forward
Definition: DetId.h:30
dqmiolumiharvest.j
j
Definition: dqmiolumiharvest.py:66
HGVHistoProducerAlgo::nintX_
int nintX_
Definition: HGVHistoProducerAlgo.h:296
EgammaValidation_cff.pdgid
pdgid
Definition: EgammaValidation_cff.py:30
HGVHistoProducerAlgo::minTotNClsperthick_
double minTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:265
CommonMethods.cp
def cp(fromDir, toDir, listOfFiles, overwrite=False, smallList=False)
Definition: CommonMethods.py:192
HGVHistoProducerAlgo::minTotNMCLs_
double minTotNMCLs_
Definition: HGVHistoProducerAlgo.h:283
HGVHistoProducerAlgoHistograms
Definition: HGVHistoProducerAlgo.h:29
HGVHistoProducerAlgo::fill_info_histos
void fill_info_histos(const Histograms &histograms, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:767
begin
#define begin
Definition: vmac.h:32
PVValHelper::dx
Definition: PVValidationHelpers.h:48
HGVHistoProducerAlgo::minScore_
double minScore_
Definition: HGVHistoProducerAlgo.h:259
hgcalTopologyTester_cfi.layers
layers
Definition: hgcalTopologyTester_cfi.py:8
HGVHistoProducerAlgo::minDisSeedToMaxperthickperlayer_
double minDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:277
HGVHistoProducerAlgo::minDisToSeedperthickperlayerenewei_
double minDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:271
hit
Definition: SiStripHitEffFromCalibTree.cc:88
muonDTDigis_cfi.pset
pset
Definition: muonDTDigis_cfi.py:27
dqm::implementation::IBooker::book1D
MonitorElement * book1D(TString const &name, TString const &title, int const nchX, double const lowX, double const highX, FUNC onbooking=NOOP())
Definition: DQMStore.h:98
HGVHistoProducerAlgo::minMplofLCs_
double minMplofLCs_
Definition: HGVHistoProducerAlgo.h:289
HGVHistoProducerAlgo::minMCLSharedEneFrac_
double minMCLSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:263
Density
hgcal_clustering::Density Density
Definition: HGCalImagingAlgo.h:29
GetRecoTauVFromDQM_MC_cff.next
next
Definition: GetRecoTauVFromDQM_MC_cff.py:31
HGVHistoProducerAlgo::maxTotNClsinMCLs_
double maxTotNClsinMCLs_
Definition: HGVHistoProducerAlgo.h:285