
HGVHistoProducerAlgo.cc
1 #include <numeric>
2 #include <iomanip>
3 
7 #include "TMath.h"
8 #include "TLatex.h"
9 #include "TF1.h"
10 
11 using namespace std;
12 
13 //Parameters for the score cut. Later, this will become part of the
14 //configuration parameters for the HGCAL associator.
15 const double ScoreCutLCtoCP_ = 0.1;
16 const double ScoreCutCPtoLC_ = 0.1;
17 const double ScoreCutLCtoSC_ = 0.1;
18 const double ScoreCutSCtoLC_ = 0.1;
19 const double ScoreCutMCLtoCPFakeMerge_ = 0.6;
20 const double ScoreCutCPtoMCLEffDup_ = 0.2;
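
As an illustration of how such a cut is meant to be used (a minimal sketch, assuming lower scores mean a better layer-cluster-to-CaloParticle association; the helper below is hypothetical and not part of this file):

 #include <algorithm>
 #include <vector>

 // A layer cluster whose best (lowest) score against every CaloParticle still
 // fails the cut would be counted as fake in the efficiency/fake bookkeeping.
 bool passesAssociationCut(const std::vector<float>& scoresToCaloParticles, double scoreCut) {
   if (scoresToCaloParticles.empty())
     return false;
   return *std::min_element(scoresToCaloParticles.begin(), scoresToCaloParticles.end()) < scoreCut;
 }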
21 
23  : //parameters for eta
24  minEta_(pset.getParameter<double>("minEta")),
25  maxEta_(pset.getParameter<double>("maxEta")),
26  nintEta_(pset.getParameter<int>("nintEta")),
27  useFabsEta_(pset.getParameter<bool>("useFabsEta")),
28 
29  //parameters for energy
30  minEne_(pset.getParameter<double>("minEne")),
31  maxEne_(pset.getParameter<double>("maxEne")),
32  nintEne_(pset.getParameter<int>("nintEne")),
33 
34  //parameters for pt
35  minPt_(pset.getParameter<double>("minPt")),
36  maxPt_(pset.getParameter<double>("maxPt")),
37  nintPt_(pset.getParameter<int>("nintPt")),
38 
39  //parameters for phi
40  minPhi_(pset.getParameter<double>("minPhi")),
41  maxPhi_(pset.getParameter<double>("maxPhi")),
42  nintPhi_(pset.getParameter<int>("nintPhi")),
43 
44  //parameters for counting SimClusters with mixed hits
45  minMixedHitsSimCluster_(pset.getParameter<double>("minMixedHitsSimCluster")),
46  maxMixedHitsSimCluster_(pset.getParameter<double>("maxMixedHitsSimCluster")),
47  nintMixedHitsSimCluster_(pset.getParameter<int>("nintMixedHitsSimCluster")),
48 
49  //parameters for counting layer clusters with mixed hits
50  minMixedHitsCluster_(pset.getParameter<double>("minMixedHitsCluster")),
51  maxMixedHitsCluster_(pset.getParameter<double>("maxMixedHitsCluster")),
52  nintMixedHitsCluster_(pset.getParameter<int>("nintMixedHitsCluster")),
53 
54  //parameters for the total amount of energy clustered by all layer clusters (fraction over caloparticles)
55  minEneCl_(pset.getParameter<double>("minEneCl")),
56  maxEneCl_(pset.getParameter<double>("maxEneCl")),
57  nintEneCl_(pset.getParameter<int>("nintEneCl")),
58 
59  //parameters for the longitudinal depth barycenter.
60  minLongDepBary_(pset.getParameter<double>("minLongDepBary")),
61  maxLongDepBary_(pset.getParameter<double>("maxLongDepBary")),
62  nintLongDepBary_(pset.getParameter<int>("nintLongDepBary")),
63 
64  //parameters for z position of vertex plots
65  minZpos_(pset.getParameter<double>("minZpos")),
66  maxZpos_(pset.getParameter<double>("maxZpos")),
67  nintZpos_(pset.getParameter<int>("nintZpos")),
68 
69  //Parameters for the total number of simclusters per layer
70  minTotNsimClsperlay_(pset.getParameter<double>("minTotNsimClsperlay")),
71  maxTotNsimClsperlay_(pset.getParameter<double>("maxTotNsimClsperlay")),
72  nintTotNsimClsperlay_(pset.getParameter<int>("nintTotNsimClsperlay")),
73 
74  //Parameters for the total number of layer clusters per layer
75  minTotNClsperlay_(pset.getParameter<double>("minTotNClsperlay")),
76  maxTotNClsperlay_(pset.getParameter<double>("maxTotNClsperlay")),
77  nintTotNClsperlay_(pset.getParameter<int>("nintTotNClsperlay")),
78 
79  //Parameters for the energy clustered by layer clusters per layer (fraction over caloparticles)
80  minEneClperlay_(pset.getParameter<double>("minEneClperlay")),
81  maxEneClperlay_(pset.getParameter<double>("maxEneClperlay")),
82  nintEneClperlay_(pset.getParameter<int>("nintEneClperlay")),
83 
84  //Parameters for the score both for:
85  //1. calo particle to layer clusters association per layer
86  //2. layer cluster to calo particles association per layer
87  minScore_(pset.getParameter<double>("minScore")),
88  maxScore_(pset.getParameter<double>("maxScore")),
89  nintScore_(pset.getParameter<int>("nintScore")),
90 
91  //Parameters for shared energy fraction. That is:
92  //1. Fraction of each layer cluster's energy related to a
93  //calo particle over that calo particle's energy.
94  //2. Fraction of each calo particle's energy
95  //related to a layer cluster over that layer cluster's energy.
96  minSharedEneFrac_(pset.getParameter<double>("minSharedEneFrac")),
97  maxSharedEneFrac_(pset.getParameter<double>("maxSharedEneFrac")),
98  nintSharedEneFrac_(pset.getParameter<int>("nintSharedEneFrac")),
99 
100  //Same as above for multiclusters
101  minMCLSharedEneFrac_(pset.getParameter<double>("minMCLSharedEneFrac")),
102  maxMCLSharedEneFrac_(pset.getParameter<double>("maxMCLSharedEneFrac")),
103  nintMCLSharedEneFrac_(pset.getParameter<int>("nintMCLSharedEneFrac")),
104 
105  //Parameters for the total number of simclusters per thickness
106  minTotNsimClsperthick_(pset.getParameter<double>("minTotNsimClsperthick")),
107  maxTotNsimClsperthick_(pset.getParameter<double>("maxTotNsimClsperthick")),
108  nintTotNsimClsperthick_(pset.getParameter<int>("nintTotNsimClsperthick")),
109 
110  //Parameters for the total number of layer clusters per thickness
111  minTotNClsperthick_(pset.getParameter<double>("minTotNClsperthick")),
112  maxTotNClsperthick_(pset.getParameter<double>("maxTotNClsperthick")),
113  nintTotNClsperthick_(pset.getParameter<int>("nintTotNClsperthick")),
114 
115  //Parameters for the total number of cells per thickness per layer
116  minTotNcellsperthickperlayer_(pset.getParameter<double>("minTotNcellsperthickperlayer")),
117  maxTotNcellsperthickperlayer_(pset.getParameter<double>("maxTotNcellsperthickperlayer")),
118  nintTotNcellsperthickperlayer_(pset.getParameter<int>("nintTotNcellsperthickperlayer")),
119 
120  //Parameters for the distance of cluster cells to seed cell per thickness per layer
121  minDisToSeedperthickperlayer_(pset.getParameter<double>("minDisToSeedperthickperlayer")),
122  maxDisToSeedperthickperlayer_(pset.getParameter<double>("maxDisToSeedperthickperlayer")),
123  nintDisToSeedperthickperlayer_(pset.getParameter<int>("nintDisToSeedperthickperlayer")),
124 
125  //Parameters for the energy weighted distance of cluster cells to seed cell per thickness per layer
126  minDisToSeedperthickperlayerenewei_(pset.getParameter<double>("minDisToSeedperthickperlayerenewei")),
127  maxDisToSeedperthickperlayerenewei_(pset.getParameter<double>("maxDisToSeedperthickperlayerenewei")),
128  nintDisToSeedperthickperlayerenewei_(pset.getParameter<int>("nintDisToSeedperthickperlayerenewei")),
129 
130  //Parameters for the distance of cluster cells to max cell per thickness per layer
131  minDisToMaxperthickperlayer_(pset.getParameter<double>("minDisToMaxperthickperlayer")),
132  maxDisToMaxperthickperlayer_(pset.getParameter<double>("maxDisToMaxperthickperlayer")),
133  nintDisToMaxperthickperlayer_(pset.getParameter<int>("nintDisToMaxperthickperlayer")),
134 
135  //Parameters for the energy weighted distance of cluster cells to max cell per thickness per layer
136  minDisToMaxperthickperlayerenewei_(pset.getParameter<double>("minDisToMaxperthickperlayerenewei")),
137  maxDisToMaxperthickperlayerenewei_(pset.getParameter<double>("maxDisToMaxperthickperlayerenewei")),
138  nintDisToMaxperthickperlayerenewei_(pset.getParameter<int>("nintDisToMaxperthickperlayerenewei")),
139 
140  //Parameters for the distance of seed cell to max cell per thickness per layer
141  minDisSeedToMaxperthickperlayer_(pset.getParameter<double>("minDisSeedToMaxperthickperlayer")),
142  maxDisSeedToMaxperthickperlayer_(pset.getParameter<double>("maxDisSeedToMaxperthickperlayer")),
143  nintDisSeedToMaxperthickperlayer_(pset.getParameter<int>("nintDisSeedToMaxperthickperlayer")),
144 
145  //Parameters for the energy of a cluster per thickness per layer
146  minClEneperthickperlayer_(pset.getParameter<double>("minClEneperthickperlayer")),
147  maxClEneperthickperlayer_(pset.getParameter<double>("maxClEneperthickperlayer")),
148  nintClEneperthickperlayer_(pset.getParameter<int>("nintClEneperthickperlayer")),
149 
150  //Parameters for the energy density of cluster cells per thickness
151  minCellsEneDensperthick_(pset.getParameter<double>("minCellsEneDensperthick")),
152  maxCellsEneDensperthick_(pset.getParameter<double>("maxCellsEneDensperthick")),
153  nintCellsEneDensperthick_(pset.getParameter<int>("nintCellsEneDensperthick")),
154 
155  //Parameters for the total number of multiclusters per event
156  //We always treat one event as two events, one in +z and one in -z
157  minTotNMCLs_(pset.getParameter<double>("minTotNMCLs")),
158  maxTotNMCLs_(pset.getParameter<double>("maxTotNMCLs")),
159  nintTotNMCLs_(pset.getParameter<int>("nintTotNMCLs")),
160 
161  //Parameters for the total number of layer clusters in multicluster
162  minTotNClsinMCLs_(pset.getParameter<double>("minTotNClsinMCLs")),
163  maxTotNClsinMCLs_(pset.getParameter<double>("maxTotNClsinMCLs")),
164  nintTotNClsinMCLs_(pset.getParameter<int>("nintTotNClsinMCLs")),
165 
166  //Parameters for the total number of layer clusters in multicluster per layer
167  minTotNClsinMCLsperlayer_(pset.getParameter<double>("minTotNClsinMCLsperlayer")),
168  maxTotNClsinMCLsperlayer_(pset.getParameter<double>("maxTotNClsinMCLsperlayer")),
169  nintTotNClsinMCLsperlayer_(pset.getParameter<int>("nintTotNClsinMCLsperlayer")),
170 
171  //Parameters for the multiplicity of layer clusters in multicluster
172  minMplofLCs_(pset.getParameter<double>("minMplofLCs")),
173  maxMplofLCs_(pset.getParameter<double>("maxMplofLCs")),
174  nintMplofLCs_(pset.getParameter<int>("nintMplofLCs")),
175 
176  //Parameters for cluster size
177  minSizeCLsinMCLs_(pset.getParameter<double>("minSizeCLsinMCLs")),
178  maxSizeCLsinMCLs_(pset.getParameter<double>("maxSizeCLsinMCLs")),
179  nintSizeCLsinMCLs_(pset.getParameter<int>("nintSizeCLsinMCLs")),
180 
181  //Parameters for the energy of a cluster, used in the multiplicity plots
182  minClEnepermultiplicity_(pset.getParameter<double>("minClEnepermultiplicity")),
183  maxClEnepermultiplicity_(pset.getParameter<double>("maxClEnepermultiplicity")),
184  nintClEnepermultiplicity_(pset.getParameter<int>("nintClEnepermultiplicity")),
185 
186  //parameters for x
187  minX_(pset.getParameter<double>("minX")),
188  maxX_(pset.getParameter<double>("maxX")),
189  nintX_(pset.getParameter<int>("nintX")),
190 
191  //parameters for y
192  minY_(pset.getParameter<double>("minY")),
193  maxY_(pset.getParameter<double>("maxY")),
194  nintY_(pset.getParameter<int>("nintY")),
195 
196  //parameters for z
197  minZ_(pset.getParameter<double>("minZ")),
198  maxZ_(pset.getParameter<double>("maxZ")),
199  nintZ_(pset.getParameter<int>("nintZ")) {}
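
Each (min, max, nint) triplet read above defines one uniform histogram axis. A minimal sketch of the implied binning arithmetic (the example values are assumptions, not taken from any actual configuration):

 #include <iostream>

 int main() {
   const double minEta = 1.5, maxEta = 3.0;  // assumed values, for illustration only
   const int nintEta = 150;
   // With these values each eta bin would be (3.0 - 1.5) / 150 = 0.01 wide.
   std::cout << "bin width = " << (maxEta - minEta) / nintEta << std::endl;
   return 0;
 }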
200 
202 
204  histograms.lastLayerEEzm = ibook.bookInt("lastLayerEEzm");
205  histograms.lastLayerFHzm = ibook.bookInt("lastLayerFHzm");
206  histograms.maxlayerzm = ibook.bookInt("maxlayerzm");
207  histograms.lastLayerEEzp = ibook.bookInt("lastLayerEEzp");
208  histograms.lastLayerFHzp = ibook.bookInt("lastLayerFHzp");
209  histograms.maxlayerzp = ibook.bookInt("maxlayerzp");
210 }
211 
214  int pdgid,
215  unsigned int layers) {
216  histograms.h_caloparticle_eta[pdgid] =
217  ibook.book1D("N of caloparticle vs eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_);
218  histograms.h_caloparticle_eta_Zorigin[pdgid] =
219  ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_);
220 
221  histograms.h_caloparticle_energy[pdgid] =
222  ibook.book1D("Energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_);
223  histograms.h_caloparticle_pt[pdgid] = ibook.book1D("Pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_);
224  histograms.h_caloparticle_phi[pdgid] = ibook.book1D("Phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_);
225  histograms.h_caloparticle_selfenergy[pdgid] =
226  ibook.book1D("SelfEnergy", "Total Energy of Hits in Sim Clusters (matched)", nintEne_, minEne_, maxEne_);
227  histograms.h_caloparticle_energyDifference[pdgid] =
228  ibook.book1D("EnergyDifference", "(Energy-SelfEnergy)/Energy", 300., -5., 1.);
229 
230  histograms.h_caloparticle_nSimClusters[pdgid] =
231  ibook.book1D("Num Sim Clusters", "Num Sim Clusters in caloparticle", 100, 0., 100.);
232  histograms.h_caloparticle_nHitsInSimClusters[pdgid] =
233  ibook.book1D("Num Hits in Sim Clusters", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.);
234  histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = ibook.book1D(
235  "Num Rec-matched Hits in Sim Clusters", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.);
236 
237  histograms.h_caloparticle_nHits_matched_energy[pdgid] =
238  ibook.book1D("Energy of Rec-matched Hits", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.);
239  histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] =
240  ibook.book2D("Energy of Rec-matched Hits vs layer",
241  "Energy of Hits in Sim Clusters (matched) vs layer",
242  2 * layers,
243  0.,
244  (float)2 * layers,
245  100,
246  0.,
247  10.);
248  histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] =
249  ibook.book2D("Energy of Rec-matched Hits vs layer (1SC)",
250  "Energy of Hits only 1 Sim Clusters (matched) vs layer",
251  2 * layers,
252  0.,
253  (float)2 * layers,
254  100,
255  0.,
256  10.);
257  histograms.h_caloparticle_sum_energy_layer[pdgid] =
258  ibook.book2D("Rec-matched Hits Sum Energy vs layer",
259  "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer",
260  2 * layers,
261  0.,
262  (float)2 * layers,
263  110,
264  0.,
265  110.);
266 
267  histograms.h_caloparticle_firstlayer[pdgid] =
268  ibook.book1D("First Layer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers);
269  histograms.h_caloparticle_lastlayer[pdgid] =
270  ibook.book1D("Last Layer", "Last layer of the caloparticle", 2 * layers, 0., (float)2 * layers);
271  histograms.h_caloparticle_layersnum[pdgid] =
272  ibook.book1D("Number of Layers", "Number of layers of the caloparticle", 2 * layers, 0., (float)2 * layers);
273  histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = ibook.book1D(
274  "First Layer (rec-matched hit)", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers);
275  histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = ibook.book1D(
276  "Last Layer (rec-matched hit)", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers);
277  histograms.h_caloparticle_layersnum_matchedtoRecHit[pdgid] =
278  ibook.book1D("Number of Layers (rec-matched hit)",
279  "Number of layers of the caloparticle (matched)",
280  2 * layers,
281  0.,
282  (float)2 * layers);
283 }
284 
287  unsigned int layers,
288  std::vector<int> thicknesses) {
289  //---------------------------------------------------------------------------------------------------------------------------
290  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
291  auto istr1 = std::to_string(ilayer);
292  while (istr1.size() < 2) {
293  istr1.insert(0, "0");
294  }
295  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
296  std::string istr2 = "";
297  //First with the -z endcap
298  if (ilayer < layers) {
299  istr2 = std::to_string(ilayer + 1) + " in z-";
300  } else { //Then for the +z
301  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
302  }
303  histograms.h_simclusternum_perlayer[ilayer] = ibook.book1D("totsimclusternum_layer_" + istr1,
304  "total number of SimClusters for layer " + istr2,
308 
309  } //end of loop over layers
310  //---------------------------------------------------------------------------------------------------------------------------
311  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
312  auto istr = std::to_string(*it);
313  histograms.h_simclusternum_perthick[(*it)] = ibook.book1D("totsimclusternum_thick_" + istr,
314  "total number of simclusters for thickness " + istr,
318  } //end of loop over thicknesses
319 
320  //---------------------------------------------------------------------------------------------------------------------------
321  //z-
322  histograms.h_mixedhitssimcluster_zminus =
323  ibook.book1D("mixedhitssimcluster_zminus",
324  "N of simclusters that contain hits of more than one kind in z-",
328  //z+
329  histograms.h_mixedhitssimcluster_zplus =
330  ibook.book1D("mixedhitssimcluster_zplus",
331  "N of simclusters that contain hits of more than one kind in z+",
335 }
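
The loop above (like every per-layer booking loop below) converts the global layer index ilayer in [0, 2*layers) into a human-readable label, with the first half of the range on the z- side. A standalone sketch of that same mapping (the layers value in the usage example is assumed, for illustration only):

 #include <iostream>
 #include <string>

 std::string layerLabel(unsigned ilayer, unsigned layers) {
   if (ilayer < layers)
     return std::to_string(ilayer + 1) + " in z-";
   return std::to_string(ilayer - (layers - 1)) + " in z+";
 }

 int main() {
   const unsigned layers = 52;  // assumed number of layers per endcap
   std::cout << layerLabel(0, layers) << std::endl;   // prints "1 in z-"
   std::cout << layerLabel(52, layers) << std::endl;  // prints "1 in z+"
   return 0;
 }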
336 
339  unsigned int layers,
340  std::vector<int> thicknesses) {
341  std::unordered_map<int, dqm::reco::MonitorElement*> denom_layercl_in_simcl_eta_perlayer;
342  denom_layercl_in_simcl_eta_perlayer.clear();
343  std::unordered_map<int, dqm::reco::MonitorElement*> denom_layercl_in_simcl_phi_perlayer;
344  denom_layercl_in_simcl_phi_perlayer.clear();
345  std::unordered_map<int, dqm::reco::MonitorElement*> score_layercl2simcluster_perlayer;
346  score_layercl2simcluster_perlayer.clear();
347  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_perlayer;
348  sharedenergy_layercl2simcluster_perlayer.clear();
349  std::unordered_map<int, dqm::reco::MonitorElement*> energy_vs_score_layercl2simcluster_perlayer;
350  energy_vs_score_layercl2simcluster_perlayer.clear();
351  std::unordered_map<int, dqm::reco::MonitorElement*> num_layercl_in_simcl_eta_perlayer;
352  num_layercl_in_simcl_eta_perlayer.clear();
353  std::unordered_map<int, dqm::reco::MonitorElement*> num_layercl_in_simcl_phi_perlayer;
354  num_layercl_in_simcl_phi_perlayer.clear();
355  std::unordered_map<int, dqm::reco::MonitorElement*> numMerge_layercl_in_simcl_eta_perlayer;
356  numMerge_layercl_in_simcl_eta_perlayer.clear();
357  std::unordered_map<int, dqm::reco::MonitorElement*> numMerge_layercl_in_simcl_phi_perlayer;
358  numMerge_layercl_in_simcl_phi_perlayer.clear();
359  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_vs_eta_perlayer;
360  sharedenergy_layercl2simcluster_vs_eta_perlayer.clear();
361  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_vs_phi_perlayer;
362  sharedenergy_layercl2simcluster_vs_phi_perlayer.clear();
363  std::unordered_map<int, dqm::reco::MonitorElement*> denom_simcluster_eta_perlayer;
364  denom_simcluster_eta_perlayer.clear();
365  std::unordered_map<int, dqm::reco::MonitorElement*> denom_simcluster_phi_perlayer;
366  denom_simcluster_phi_perlayer.clear();
367  std::unordered_map<int, dqm::reco::MonitorElement*> score_simcluster2layercl_perlayer;
368  score_simcluster2layercl_perlayer.clear();
369  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_perlayer;
370  sharedenergy_simcluster2layercl_perlayer.clear();
371  std::unordered_map<int, dqm::reco::MonitorElement*> energy_vs_score_simcluster2layercl_perlayer;
372  energy_vs_score_simcluster2layercl_perlayer.clear();
373  std::unordered_map<int, dqm::reco::MonitorElement*> num_simcluster_eta_perlayer;
374  num_simcluster_eta_perlayer.clear();
375  std::unordered_map<int, dqm::reco::MonitorElement*> num_simcluster_phi_perlayer;
376  num_simcluster_phi_perlayer.clear();
377  std::unordered_map<int, dqm::reco::MonitorElement*> numDup_simcluster_eta_perlayer;
378  numDup_simcluster_eta_perlayer.clear();
379  std::unordered_map<int, dqm::reco::MonitorElement*> numDup_simcluster_phi_perlayer;
380  numDup_simcluster_phi_perlayer.clear();
381  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_vs_eta_perlayer;
382  sharedenergy_simcluster2layercl_vs_eta_perlayer.clear();
383  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_vs_phi_perlayer;
384  sharedenergy_simcluster2layercl_vs_phi_perlayer.clear();
385 
386  //---------------------------------------------------------------------------------------------------------------------------
387  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
388  auto istr1 = std::to_string(ilayer);
389  while (istr1.size() < 2) {
390  istr1.insert(0, "0");
391  }
392  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
393  std::string istr2 = "";
394  //First with the -z endcap
395  if (ilayer < layers) {
396  istr2 = std::to_string(ilayer + 1) + " in z-";
397  } else { //Then for the +z
398  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
399  }
400  //-------------------------------------------------------------------------------------------------------------------------
401  denom_layercl_in_simcl_eta_perlayer[ilayer] =
402  ibook.book1D("Denom_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
403  "Denom LayerCluster in SimCluster Eta per Layer Cluster for layer " + istr2,
404  nintEta_,
405  minEta_,
406  maxEta_);
407  //-------------------------------------------------------------------------------------------------------------------------
408  denom_layercl_in_simcl_phi_perlayer[ilayer] =
409  ibook.book1D("Denom_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
410  "Denom LayerCluster in SimCluster Phi per Layer Cluster for layer " + istr2,
411  nintPhi_,
412  minPhi_,
413  maxPhi_);
414  //-------------------------------------------------------------------------------------------------------------------------
415  score_layercl2simcluster_perlayer[ilayer] = ibook.book1D("Score_layercl2simcluster_perlayer" + istr1,
416  "Score of Layer Cluster per SimCluster for layer " + istr2,
417  nintScore_,
418  minScore_,
419  maxScore_);
420  //-------------------------------------------------------------------------------------------------------------------------
421  score_simcluster2layercl_perlayer[ilayer] = ibook.book1D("Score_simcluster2layercl_perlayer" + istr1,
422  "Score of SimCluster per Layer Cluster for layer " + istr2,
423  nintScore_,
424  minScore_,
425  maxScore_);
426  //-------------------------------------------------------------------------------------------------------------------------
427  energy_vs_score_simcluster2layercl_perlayer[ilayer] =
428  ibook.book2D("Energy_vs_Score_simcluster2layer_perlayer" + istr1,
429  "Energy vs Score of SimCluster per Layer Cluster for layer " + istr2,
430  nintScore_,
431  minScore_,
432  maxScore_,
436  //-------------------------------------------------------------------------------------------------------------------------
437  energy_vs_score_layercl2simcluster_perlayer[ilayer] =
438  ibook.book2D("Energy_vs_Score_layer2simcluster_perlayer" + istr1,
439  "Energy vs Score of Layer Cluster per SimCluster for layer " + istr2,
440  nintScore_,
441  minScore_,
442  maxScore_,
446  //-------------------------------------------------------------------------------------------------------------------------
447  sharedenergy_simcluster2layercl_perlayer[ilayer] =
448  ibook.book1D("SharedEnergy_simcluster2layercl_perlayer" + istr1,
449  "Shared Energy of SimCluster per Layer Cluster for layer " + istr2,
453  //-------------------------------------------------------------------------------------------------------------------------
454  sharedenergy_simcluster2layercl_vs_eta_perlayer[ilayer] =
455  ibook.bookProfile("SharedEnergy_simcluster2layercl_vs_eta_perlayer" + istr1,
456  "Shared Energy of SimCluster vs #eta per best Layer Cluster for layer " + istr2,
457  nintEta_,
458  minEta_,
459  maxEta_,
462  //-------------------------------------------------------------------------------------------------------------------------
463  sharedenergy_simcluster2layercl_vs_phi_perlayer[ilayer] =
464  ibook.bookProfile("SharedEnergy_simcluster2layercl_vs_phi_perlayer" + istr1,
465  "Shared Energy of SimCluster vs #phi per best Layer Cluster for layer " + istr2,
466  nintPhi_,
467  minPhi_,
468  maxPhi_,
471  //-------------------------------------------------------------------------------------------------------------------------
472  sharedenergy_layercl2simcluster_perlayer[ilayer] =
473  ibook.book1D("SharedEnergy_layercluster2simcluster_perlayer" + istr1,
474  "Shared Energy of Layer Cluster per SimCluster for layer " + istr2,
478  //-------------------------------------------------------------------------------------------------------------------------
479  sharedenergy_layercl2simcluster_vs_eta_perlayer[ilayer] =
480  ibook.bookProfile("SharedEnergy_layercl2simcluster_vs_eta_perlayer" + istr1,
481  "Shared Energy of LayerCluster vs #eta per best SimCluster for layer " + istr2,
482  nintEta_,
483  minEta_,
484  maxEta_,
487  //-------------------------------------------------------------------------------------------------------------------------
488  sharedenergy_layercl2simcluster_vs_phi_perlayer[ilayer] =
489  ibook.bookProfile("SharedEnergy_layercl2simcluster_vs_phi_perlayer" + istr1,
490  "Shared Energy of LayerCluster vs #phi per best SimCluster for layer " + istr2,
491  nintPhi_,
492  minPhi_,
493  maxPhi_,
496  //-------------------------------------------------------------------------------------------------------------------------
497  num_simcluster_eta_perlayer[ilayer] = ibook.book1D("Num_SimCluster_Eta_perlayer" + istr1,
498  "Num SimCluster Eta per Layer Cluster for layer " + istr2,
499  nintEta_,
500  minEta_,
501  maxEta_);
502  //-------------------------------------------------------------------------------------------------------------------------
503  numDup_simcluster_eta_perlayer[ilayer] =
504  ibook.book1D("NumDup_SimCluster_Eta_perlayer" + istr1,
505  "Num Duplicate SimCluster Eta per Layer Cluster for layer " + istr2,
506  nintEta_,
507  minEta_,
508  maxEta_);
509  //-------------------------------------------------------------------------------------------------------------------------
510  denom_simcluster_eta_perlayer[ilayer] = ibook.book1D("Denom_SimCluster_Eta_perlayer" + istr1,
511  "Denom SimCluster Eta per Layer Cluster for layer " + istr2,
512  nintEta_,
513  minEta_,
514  maxEta_);
515  //-------------------------------------------------------------------------------------------------------------------------
516  num_simcluster_phi_perlayer[ilayer] = ibook.book1D("Num_SimCluster_Phi_perlayer" + istr1,
517  "Num SimCluster Phi per Layer Cluster for layer " + istr2,
518  nintPhi_,
519  minPhi_,
520  maxPhi_);
521  //-------------------------------------------------------------------------------------------------------------------------
522  numDup_simcluster_phi_perlayer[ilayer] =
523  ibook.book1D("NumDup_SimCluster_Phi_perlayer" + istr1,
524  "Num Duplicate SimCluster Phi per Layer Cluster for layer " + istr2,
525  nintPhi_,
526  minPhi_,
527  maxPhi_);
528  //-------------------------------------------------------------------------------------------------------------------------
529  denom_simcluster_phi_perlayer[ilayer] = ibook.book1D("Denom_SimCluster_Phi_perlayer" + istr1,
530  "Denom SimCluster Phi per Layer Cluster for layer " + istr2,
531  nintPhi_,
532  minPhi_,
533  maxPhi_);
534  //-------------------------------------------------------------------------------------------------------------------------
535  num_layercl_in_simcl_eta_perlayer[ilayer] =
536  ibook.book1D("Num_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
537  "Num LayerCluster Eta per Layer Cluster in SimCluster for layer " + istr2,
538  nintEta_,
539  minEta_,
540  maxEta_);
541  //-------------------------------------------------------------------------------------------------------------------------
542  numMerge_layercl_in_simcl_eta_perlayer[ilayer] =
543  ibook.book1D("NumMerge_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
544  "Num Merge LayerCluster Eta per Layer Cluster in SimCluster for layer " + istr2,
545  nintEta_,
546  minEta_,
547  maxEta_);
548  //-------------------------------------------------------------------------------------------------------------------------
549  num_layercl_in_simcl_phi_perlayer[ilayer] =
550  ibook.book1D("Num_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
551  "Num LayerCluster Phi per Layer Cluster in SimCluster for layer " + istr2,
552  nintPhi_,
553  minPhi_,
554  maxPhi_);
555  //-------------------------------------------------------------------------------------------------------------------------
556  numMerge_layercl_in_simcl_phi_perlayer[ilayer] =
557  ibook.book1D("NumMerge_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
558  "Num Merge LayerCluster Phi per Layer Cluster in SimCluster for layer " + istr2,
559  nintPhi_,
560  minPhi_,
561  maxPhi_);
562 
563  } //end of loop over layers
564 
565  histograms.h_denom_layercl_in_simcl_eta_perlayer.push_back(std::move(denom_layercl_in_simcl_eta_perlayer));
566  histograms.h_denom_layercl_in_simcl_phi_perlayer.push_back(std::move(denom_layercl_in_simcl_phi_perlayer));
567  histograms.h_score_layercl2simcluster_perlayer.push_back(std::move(score_layercl2simcluster_perlayer));
568  histograms.h_sharedenergy_layercl2simcluster_perlayer.push_back(std::move(sharedenergy_layercl2simcluster_perlayer));
569  histograms.h_energy_vs_score_layercl2simcluster_perlayer.push_back(
570  std::move(energy_vs_score_layercl2simcluster_perlayer));
571  histograms.h_num_layercl_in_simcl_eta_perlayer.push_back(std::move(num_layercl_in_simcl_eta_perlayer));
572  histograms.h_num_layercl_in_simcl_phi_perlayer.push_back(std::move(num_layercl_in_simcl_phi_perlayer));
573  histograms.h_numMerge_layercl_in_simcl_eta_perlayer.push_back(std::move(numMerge_layercl_in_simcl_eta_perlayer));
574  histograms.h_numMerge_layercl_in_simcl_phi_perlayer.push_back(std::move(numMerge_layercl_in_simcl_phi_perlayer));
575  histograms.h_sharedenergy_layercl2simcluster_vs_eta_perlayer.push_back(
576  std::move(sharedenergy_layercl2simcluster_vs_eta_perlayer));
577  histograms.h_sharedenergy_layercl2simcluster_vs_phi_perlayer.push_back(
578  std::move(sharedenergy_layercl2simcluster_vs_phi_perlayer));
579  histograms.h_denom_simcluster_eta_perlayer.push_back(std::move(denom_simcluster_eta_perlayer));
580  histograms.h_denom_simcluster_phi_perlayer.push_back(std::move(denom_simcluster_phi_perlayer));
581  histograms.h_score_simcluster2layercl_perlayer.push_back(std::move(score_simcluster2layercl_perlayer));
582  histograms.h_sharedenergy_simcluster2layercl_perlayer.push_back(std::move(sharedenergy_simcluster2layercl_perlayer));
583  histograms.h_energy_vs_score_simcluster2layercl_perlayer.push_back(
584  std::move(energy_vs_score_simcluster2layercl_perlayer));
585  histograms.h_num_simcluster_eta_perlayer.push_back(std::move(num_simcluster_eta_perlayer));
586  histograms.h_num_simcluster_phi_perlayer.push_back(std::move(num_simcluster_phi_perlayer));
587  histograms.h_numDup_simcluster_eta_perlayer.push_back(std::move(numDup_simcluster_eta_perlayer));
588  histograms.h_numDup_simcluster_phi_perlayer.push_back(std::move(numDup_simcluster_phi_perlayer));
589  histograms.h_sharedenergy_simcluster2layercl_vs_eta_perlayer.push_back(
590  std::move(sharedenergy_simcluster2layercl_vs_eta_perlayer));
591  histograms.h_sharedenergy_simcluster2layercl_vs_phi_perlayer.push_back(
592  std::move(sharedenergy_simcluster2layercl_vs_phi_perlayer));
593 }
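
The function above books each per-layer MonitorElement into a local map keyed by the global layer index and then moves that map into a vector held by the Histograms struct, so every booking call appends one complete per-layer set. A self-contained sketch of that pattern with plain STL types (names and string payloads are placeholders, not the real DQM types):

 #include <string>
 #include <unordered_map>
 #include <utility>
 #include <vector>

 int main() {
   const unsigned layers = 52;  // assumed, for illustration
   std::vector<std::unordered_map<int, std::string>> h_score_perlayer;  // lives in the histograms struct
   std::unordered_map<int, std::string> score_perlayer;                 // local, filled per booking call
   for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer)
     score_perlayer[ilayer] = "Score_layercl2simcluster_perlayer" + std::to_string(ilayer);
   h_score_perlayer.push_back(std::move(score_perlayer));
   return 0;
 }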
596  unsigned int layers,
597  std::vector<int> thicknesses,
598  std::string pathtomatbudfile) {
599  //---------------------------------------------------------------------------------------------------------------------------
600  histograms.h_cluster_eta.push_back(
601  ibook.book1D("num_reco_cluster_eta", "N of reco clusters vs eta", nintEta_, minEta_, maxEta_));
602 
603  //---------------------------------------------------------------------------------------------------------------------------
604  //z-
605  histograms.h_mixedhitscluster_zminus.push_back(
606  ibook.book1D("mixedhitscluster_zminus",
607  "N of reco clusters that contain hits of more than one kind in z-",
611  //z+
612  histograms.h_mixedhitscluster_zplus.push_back(
613  ibook.book1D("mixedhitscluster_zplus",
614  "N of reco clusters that contain hits of more than one kind in z+",
618 
619  //---------------------------------------------------------------------------------------------------------------------------
620  //z-
621  histograms.h_energyclustered_zminus.push_back(
622  ibook.book1D("energyclustered_zminus",
623  "percent of total energy clustered by all layer clusters over caloparticles energy in z-",
624  nintEneCl_,
625  minEneCl_,
626  maxEneCl_));
627  //z+
628  histograms.h_energyclustered_zplus.push_back(
629  ibook.book1D("energyclustered_zplus",
630  "percent of total energy clustered by all layer clusters over caloparticles energy in z+",
631  nintEneCl_,
632  minEneCl_,
633  maxEneCl_));
634 
635  //---------------------------------------------------------------------------------------------------------------------------
636  //z-
637  std::string subpathtomat = pathtomatbudfile.substr(pathtomatbudfile.find("Validation"));
638  histograms.h_longdepthbarycentre_zminus.push_back(
639  ibook.book1D("longdepthbarycentre_zminus",
640  "The longitudinal depth barycentre in z- for " + subpathtomat,
643  maxLongDepBary_));
644  //z+
645  histograms.h_longdepthbarycentre_zplus.push_back(
646  ibook.book1D("longdepthbarycentre_zplus",
647  "The longitudinal depth barycentre in z+ for " + subpathtomat,
650  maxLongDepBary_));
651 
652  //---------------------------------------------------------------------------------------------------------------------------
653  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
654  auto istr1 = std::to_string(ilayer);
655  while (istr1.size() < 2) {
656  istr1.insert(0, "0");
657  }
658  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
659  std::string istr2 = "";
660  //First with the -z endcap
661  if (ilayer < layers) {
662  istr2 = std::to_string(ilayer + 1) + " in z-";
663  } else { //Then for the +z
664  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
665  }
666  histograms.h_clusternum_perlayer[ilayer] = ibook.book1D("totclusternum_layer_" + istr1,
667  "total number of layer clusters for layer " + istr2,
671  histograms.h_energyclustered_perlayer[ilayer] =
672  ibook.book1D("energyclustered_perlayer" + istr1,
673  "percent of total energy clustered by layer clusters over caloparticles energy for layer " + istr2,
677  }
678 
679  //---------------------------------------------------------------------------------------------------------------------------
680  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
681  auto istr = std::to_string(*it);
682  histograms.h_clusternum_perthick[(*it)] = ibook.book1D("totclusternum_thick_" + istr,
683  "total number of layer clusters for thickness " + istr,
687  }
688  //---------------------------------------------------------------------------------------------------------------------------
689 }
690 
693  unsigned int layers) {
694  //----------------------------------------------------------------------------------------------------------------------------
695  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
696  auto istr1 = std::to_string(ilayer);
697  while (istr1.size() < 2) {
698  istr1.insert(0, "0");
699  }
700  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
701  std::string istr2 = "";
702  //First with the -z endcap
703  if (ilayer < layers) {
704  istr2 = std::to_string(ilayer + 1) + " in z-";
705  } else { //Then for the +z
706  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
707  }
708  histograms.h_score_layercl2caloparticle_perlayer[ilayer] =
709  ibook.book1D("Score_layercl2caloparticle_perlayer" + istr1,
710  "Score of Layer Cluster per CaloParticle for layer " + istr2,
711  nintScore_,
712  minScore_,
713  maxScore_);
714  histograms.h_score_caloparticle2layercl_perlayer[ilayer] =
715  ibook.book1D("Score_caloparticle2layercl_perlayer" + istr1,
716  "Score of CaloParticle per Layer Cluster for layer " + istr2,
717  nintScore_,
718  minScore_,
719  maxScore_);
720  histograms.h_energy_vs_score_caloparticle2layercl_perlayer[ilayer] =
721  ibook.book2D("Energy_vs_Score_caloparticle2layer_perlayer" + istr1,
722  "Energy vs Score of CaloParticle per Layer Cluster for layer " + istr2,
723  nintScore_,
724  minScore_,
725  maxScore_,
729  histograms.h_energy_vs_score_layercl2caloparticle_perlayer[ilayer] =
730  ibook.book2D("Energy_vs_Score_layer2caloparticle_perlayer" + istr1,
731  "Energy vs Score of Layer Cluster per CaloParticle Layer for layer " + istr2,
732  nintScore_,
733  minScore_,
734  maxScore_,
738  histograms.h_sharedenergy_caloparticle2layercl_perlayer[ilayer] =
739  ibook.book1D("SharedEnergy_caloparticle2layercl_perlayer" + istr1,
740  "Shared Energy of CaloParticle per Layer Cluster for layer " + istr2,
744  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer[ilayer] =
745  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_eta_perlayer" + istr1,
746  "Shared Energy of CaloParticle vs #eta per best Layer Cluster for layer " + istr2,
747  nintEta_,
748  minEta_,
749  maxEta_,
752  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer[ilayer] =
753  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_phi_perlayer" + istr1,
754  "Shared Energy of CaloParticle vs #phi per best Layer Cluster for layer " + istr2,
755  nintPhi_,
756  minPhi_,
757  maxPhi_,
760  histograms.h_sharedenergy_layercl2caloparticle_perlayer[ilayer] =
761  ibook.book1D("SharedEnergy_layercluster2caloparticle_perlayer" + istr1,
762  "Shared Energy of Layer Cluster per Layer Calo Particle for layer " + istr2,
766  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer[ilayer] =
767  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_eta_perlayer" + istr1,
768  "Shared Energy of LayerCluster vs #eta per best Calo Particle for layer " + istr2,
769  nintEta_,
770  minEta_,
771  maxEta_,
774  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer[ilayer] =
775  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_phi_perlayer" + istr1,
776  "Shared Energy of LayerCluster vs #phi per best Calo Particle for layer " + istr2,
777  nintPhi_,
778  minPhi_,
779  maxPhi_,
782  histograms.h_num_caloparticle_eta_perlayer[ilayer] =
783  ibook.book1D("Num_CaloParticle_Eta_perlayer" + istr1,
784  "Num CaloParticle Eta per Layer Cluster for layer " + istr2,
785  nintEta_,
786  minEta_,
787  maxEta_);
788  histograms.h_numDup_caloparticle_eta_perlayer[ilayer] =
789  ibook.book1D("NumDup_CaloParticle_Eta_perlayer" + istr1,
790  "Num Duplicate CaloParticle Eta per Layer Cluster for layer " + istr2,
791  nintEta_,
792  minEta_,
793  maxEta_);
794  histograms.h_denom_caloparticle_eta_perlayer[ilayer] =
795  ibook.book1D("Denom_CaloParticle_Eta_perlayer" + istr1,
796  "Denom CaloParticle Eta per Layer Cluster for layer " + istr2,
797  nintEta_,
798  minEta_,
799  maxEta_);
800  histograms.h_num_caloparticle_phi_perlayer[ilayer] =
801  ibook.book1D("Num_CaloParticle_Phi_perlayer" + istr1,
802  "Num CaloParticle Phi per Layer Cluster for layer " + istr2,
803  nintPhi_,
804  minPhi_,
805  maxPhi_);
806  histograms.h_numDup_caloparticle_phi_perlayer[ilayer] =
807  ibook.book1D("NumDup_CaloParticle_Phi_perlayer" + istr1,
808  "Num Duplicate CaloParticle Phi per Layer Cluster for layer " + istr2,
809  nintPhi_,
810  minPhi_,
811  maxPhi_);
812  histograms.h_denom_caloparticle_phi_perlayer[ilayer] =
813  ibook.book1D("Denom_CaloParticle_Phi_perlayer" + istr1,
814  "Denom CaloParticle Phi per Layer Cluster for layer " + istr2,
815  nintPhi_,
816  minPhi_,
817  maxPhi_);
818  histograms.h_num_layercl_eta_perlayer[ilayer] =
819  ibook.book1D("Num_LayerCluster_Eta_perlayer" + istr1,
820  "Num LayerCluster Eta per Layer Cluster for layer " + istr2,
821  nintEta_,
822  minEta_,
823  maxEta_);
824  histograms.h_numMerge_layercl_eta_perlayer[ilayer] =
825  ibook.book1D("NumMerge_LayerCluster_Eta_perlayer" + istr1,
826  "Num Merge LayerCluster Eta per Layer Cluster for layer " + istr2,
827  nintEta_,
828  minEta_,
829  maxEta_);
830  histograms.h_denom_layercl_eta_perlayer[ilayer] =
831  ibook.book1D("Denom_LayerCluster_Eta_perlayer" + istr1,
832  "Denom LayerCluster Eta per Layer Cluster for layer " + istr2,
833  nintEta_,
834  minEta_,
835  maxEta_);
836  histograms.h_num_layercl_phi_perlayer[ilayer] =
837  ibook.book1D("Num_LayerCluster_Phi_perlayer" + istr1,
838  "Num LayerCluster Phi per Layer Cluster for layer " + istr2,
839  nintPhi_,
840  minPhi_,
841  maxPhi_);
842  histograms.h_numMerge_layercl_phi_perlayer[ilayer] =
843  ibook.book1D("NumMerge_LayerCluster_Phi_perlayer" + istr1,
844  "Num Merge LayerCluster Phi per Layer Cluster for layer " + istr2,
845  nintPhi_,
846  minPhi_,
847  maxPhi_);
848  histograms.h_denom_layercl_phi_perlayer[ilayer] =
849  ibook.book1D("Denom_LayerCluster_Phi_perlayer" + istr1,
850  "Denom LayerCluster Phi per Layer Cluster for layer " + istr2,
851  nintPhi_,
852  minPhi_,
853  maxPhi_);
854  }
855  //---------------------------------------------------------------------------------------------------------------------------
856 }
857 
860  unsigned int layers,
861  std::vector<int> thicknesses) {
862  //----------------------------------------------------------------------------------------------------------------------------
863  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
864  auto istr1 = std::to_string(ilayer);
865  while (istr1.size() < 2) {
866  istr1.insert(0, "0");
867  }
868  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
869  std::string istr2 = "";
870  //First with the -z endcap
871  if (ilayer < layers) {
872  istr2 = std::to_string(ilayer + 1) + " in z-";
873  } else { //Then for the +z
874  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
875  }
876  histograms.h_cellAssociation_perlayer[ilayer] =
877  ibook.book1D("cellAssociation_perlayer" + istr1, "Cell Association for layer " + istr2, 5, -4., 1.);
878  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(2, "TN(purity)");
879  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(3, "FN(ineff.)");
880  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(4, "FP(fake)");
881  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(5, "TP(eff.)");
882  }
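
For the cellAssociation histograms booked in the loop above, the axis arguments 5, -4., 1. give five unit-width bins, so the labels attached to bins 2 to 5 correspond to the ranges in this worked example (a sketch of the bin arithmetic only; the actual fill values are not shown in this listing):

 // bin 1: [-4, -3)   (unlabelled)
 // bin 2: [-3, -2)   "TN(purity)"
 // bin 3: [-2, -1)   "FN(ineff.)"
 // bin 4: [-1,  0)   "FP(fake)"
 // bin 5: [ 0,  1)   "TP(eff.)"
 // e.g. a fill at -3. would be counted in the TN bin.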
883  //----------------------------------------------------------------------------------------------------------------------------
884  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
885  auto istr = std::to_string(*it);
886  histograms.h_cellsenedens_perthick[(*it)] = ibook.book1D("cellsenedens_thick_" + istr,
887  "energy density of cluster cells for thickness " + istr,
891  }
892  //----------------------------------------------------------------------------------------------------------------------------
893  //Not all combinations exist, but we keep them all for cross-checking reasons.
894  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
895  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
896  auto istr1 = std::to_string(*it);
897  auto istr2 = std::to_string(ilayer);
898  while (istr2.size() < 2)
899  istr2.insert(0, "0");
900  auto istr = istr1 + "_" + istr2;
901  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
902  std::string istr3 = "";
903  //First with the -z endcap
904  if (ilayer < layers) {
905  istr3 = std::to_string(ilayer + 1) + " in z- ";
906  } else { //Then for the +z
907  istr3 = std::to_string(ilayer - (layers - 1)) + " in z+ ";
908  }
909  //---
910  histograms.h_cellsnum_perthickperlayer[istr] =
911  ibook.book1D("cellsnum_perthick_perlayer_" + istr,
912  "total number of cells for layer " + istr3 + " for thickness " + istr1,
916  //---
917  histograms.h_distancetoseedcell_perthickperlayer[istr] =
918  ibook.book1D("distancetoseedcell_perthickperlayer_" + istr,
919  "distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
923  //---
924  histograms.h_distancetoseedcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
925  "distancetoseedcell_perthickperlayer_eneweighted_" + istr,
926  "energy weighted distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
930  //---
931  histograms.h_distancetomaxcell_perthickperlayer[istr] =
932  ibook.book1D("distancetomaxcell_perthickperlayer_" + istr,
933  "distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
937  //---
938  histograms.h_distancetomaxcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
939  "distancetomaxcell_perthickperlayer_eneweighted_" + istr,
940  "energy weighted distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
944  //---
945  histograms.h_distancebetseedandmaxcell_perthickperlayer[istr] =
946  ibook.book1D("distancebetseedandmaxcell_perthickperlayer_" + istr,
947  "distance of seed cell to max cell for layer " + istr3 + " for thickness " + istr1,
951  //---
952  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer[istr] = ibook.book2D(
953  "distancebetseedandmaxcellvsclusterenergy_perthickperlayer_" + istr,
954  "distance of seed cell to max cell vs cluster energy for layer " + istr3 + " for thickness " + istr1,
961  }
962  }
963 }
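
The per-thickness-per-layer histograms booked in the loop above are keyed by a composite string built from the thickness and the zero-padded layer index. A small self-contained sketch of that key construction (the 120 µm thickness in the usage example is only illustrative):

 #include <iostream>
 #include <string>

 std::string thicknessLayerKey(int thickness, unsigned ilayer) {
   std::string layerStr = std::to_string(ilayer);
   while (layerStr.size() < 2)
     layerStr.insert(0, "0");  // zero-pad the layer index to two digits, as done above
   return std::to_string(thickness) + "_" + layerStr;
 }

 int main() {
   std::cout << thicknessLayerKey(120, 1) << std::endl;  // prints "120_01"
   return 0;
 }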
964 //----------------------------------------------------------------------------------------------------------------------------
965 
968  unsigned int layers) {
969  histograms.h_score_multicl2caloparticle.push_back(ibook.book1D(
970  "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_));
971  histograms.h_score_caloparticle2multicl.push_back(ibook.book1D(
972  "Score_caloparticle2multicl", "Score of CaloParticle per Multi Cluster", nintScore_, minScore_, maxScore_));
973  histograms.h_energy_vs_score_multicl2caloparticle.push_back(
974  ibook.book2D("Energy_vs_Score_multi2caloparticle",
975  "Energy vs Score of Multi Cluster per CaloParticle",
976  nintScore_,
977  minScore_,
978  maxScore_,
982  histograms.h_energy_vs_score_caloparticle2multicl.push_back(
983  ibook.book2D("Energy_vs_Score_caloparticle2multi",
984  "Energy vs Score of CaloParticle per Multi Cluster",
985  nintScore_,
986  minScore_,
987  maxScore_,
991 
992  //back to all multiclusters
993  histograms.h_num_multicl_eta.push_back(
994  ibook.book1D("Num_MultiCluster_Eta", "Num MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
995  histograms.h_numMerge_multicl_eta.push_back(ibook.book1D(
996  "NumMerge_MultiCluster_Eta", "Num Merge MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
997  histograms.h_denom_multicl_eta.push_back(
998  ibook.book1D("Denom_MultiCluster_Eta", "Denom MultiCluster Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
999  histograms.h_num_multicl_phi.push_back(
1000  ibook.book1D("Num_MultiCluster_Phi", "Num MultiCluster Phi per Multi Cluster ", nintPhi_, minPhi_, maxPhi_));
1001  histograms.h_numMerge_multicl_phi.push_back(ibook.book1D(
1002  "NumMerge_MultiCluster_Phi", "Num Merge MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
1003  histograms.h_denom_multicl_phi.push_back(
1004  ibook.book1D("Denom_MultiCluster_Phi", "Denom MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
1005  histograms.h_sharedenergy_multicl2caloparticle.push_back(
1006  ibook.book1D("SharedEnergy_multicluster2caloparticle",
1007  "Shared Energy of Multi Cluster per Calo Particle in each layer",
1011  histograms.h_sharedenergy_multicl2caloparticle_vs_eta.push_back(
1012  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_eta",
1013  "Shared Energy of MultiCluster vs #eta per best Calo Particle in each layer",
1014  nintEta_,
1015  minEta_,
1016  maxEta_,
1019  histograms.h_sharedenergy_multicl2caloparticle_vs_phi.push_back(
1020  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_phi",
1021  "Shared Energy of MultiCluster vs #phi per best Calo Particle in each layer",
1022  nintPhi_,
1023  minPhi_,
1024  maxPhi_,
1027  histograms.h_sharedenergy_caloparticle2multicl.push_back(
1028  ibook.book1D("SharedEnergy_caloparticle2multicl",
1029  "Shared Energy of CaloParticle per Multi Cluster",
1033  histograms.h_sharedenergy_caloparticle2multicl_assoc.push_back(
1034  ibook.book1D("SharedEnergy_caloparticle2multicl_assoc",
1035  "Shared Energy of Associated CaloParticle per Multi Cluster",
1039  histograms.h_sharedenergy_caloparticle2multicl_vs_eta.push_back(
1040  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_eta",
1041  "Shared Energy of CaloParticle vs #eta per best Multi Cluster",
1042  nintEta_,
1043  minEta_,
1044  maxEta_,
1047  histograms.h_sharedenergy_caloparticle2multicl_vs_phi.push_back(
1048  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_phi",
1049  "Shared Energy of CaloParticle vs #phi per best Multi Cluster",
1050  nintPhi_,
1051  minPhi_,
1052  maxPhi_,
1055  histograms.h_num_caloparticle_eta.push_back(
1056  ibook.book1D("Num_CaloParticle_Eta", "Num CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
1057  histograms.h_numDup_multicl_eta.push_back(
1058  ibook.book1D("NumDup_MultiCluster_Eta", "Num Duplicate MultiCl vs Eta", nintEta_, minEta_, maxEta_));
1059  histograms.h_denom_caloparticle_eta.push_back(
1060  ibook.book1D("Denom_CaloParticle_Eta", "Denom CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
1061  histograms.h_num_caloparticle_phi.push_back(
1062  ibook.book1D("Num_CaloParticle_Phi", "Num CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
1063  histograms.h_numDup_multicl_phi.push_back(
1064  ibook.book1D("NumDup_MultiCluster_Phi", "Num Duplicate MultiCl vs Phi", nintPhi_, minPhi_, maxPhi_));
1065  histograms.h_denom_caloparticle_phi.push_back(
1066  ibook.book1D("Denom_CaloParticle_Phi", "Denom CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
1067 
1068  std::unordered_map<int, dqm::reco::MonitorElement*> clusternum_in_multicluster_perlayer;
1069  clusternum_in_multicluster_perlayer.clear();
1070 
1071  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
1072  auto istr1 = std::to_string(ilayer);
1073  while (istr1.size() < 2) {
1074  istr1.insert(0, "0");
1075  }
1076  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
1077  std::string istr2 = "";
1078  //First with the -z endcap
1079  if (ilayer < layers) {
1080  istr2 = std::to_string(ilayer + 1) + " in z-";
1081  } else { //Then for the +z
1082  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
1083  }
1084 
1085  clusternum_in_multicluster_perlayer[ilayer] =
1086  ibook.book1D("clusternum_in_multicluster_perlayer" + istr1,
1087  "Number of layer clusters in multicluster for layer " + istr2,
1091  }
1092 
1093  histograms.h_clusternum_in_multicluster_perlayer.push_back(std::move(clusternum_in_multicluster_perlayer));
1094 
1095  histograms.h_multiclusternum.push_back(
1096  ibook.book1D("totmulticlusternum", "total number of multiclusters", nintTotNMCLs_, minTotNMCLs_, maxTotNMCLs_));
1097 
1098  histograms.h_contmulticlusternum.push_back(ibook.book1D("contmulticlusternum",
1099  "number of multiclusters with 3 contiguous layers",
1100  nintTotNMCLs_,
1101  minTotNMCLs_,
1102  maxTotNMCLs_));
1103 
1104  histograms.h_noncontmulticlusternum.push_back(ibook.book1D("noncontmulticlusternum",
1105  "number of multiclusters without 3 contiguous layers",
1106  nintTotNMCLs_,
1107  minTotNMCLs_,
1108  maxTotNMCLs_));
1109 
1110  histograms.h_clusternum_in_multicluster.push_back(ibook.book1D("clusternum_in_multicluster",
1111  "total number of layer clusters in multicluster",
1115 
1116  histograms.h_clusternum_in_multicluster_vs_layer.push_back(
1117  ibook.bookProfile("clusternum_in_multicluster_vs_layer",
1118  "Profile of 2d layer clusters in multicluster vs layer number",
1119  2 * layers,
1120  0.,
1121  2. * layers,
1124 
1125  histograms.h_multiplicityOfLCinMCL.push_back(ibook.book2D("multiplicityOfLCinMCL",
1126  "Multiplicity vs Layer cluster size in Multiclusters",
1127  nintMplofLCs_,
1128  minMplofLCs_,
1129  maxMplofLCs_,
1133 
1134  histograms.h_multiplicity_numberOfEventsHistogram.push_back(ibook.book1D("multiplicity_numberOfEventsHistogram",
1135  "multiplicity numberOfEventsHistogram",
1136  nintMplofLCs_,
1137  minMplofLCs_,
1138  maxMplofLCs_));
1139 
1140  histograms.h_multiplicity_zminus_numberOfEventsHistogram.push_back(
1141  ibook.book1D("multiplicity_zminus_numberOfEventsHistogram",
1142  "multiplicity numberOfEventsHistogram in z-",
1143  nintMplofLCs_,
1144  minMplofLCs_,
1145  maxMplofLCs_));
1146 
1147  histograms.h_multiplicity_zplus_numberOfEventsHistogram.push_back(
1148  ibook.book1D("multiplicity_zplus_numberOfEventsHistogram",
1149  "multiplicity numberOfEventsHistogram in z+",
1150  nintMplofLCs_,
1151  minMplofLCs_,
1152  maxMplofLCs_));
1153 
1154  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus.push_back(
1155  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zminus",
1156  "Multiplicity vs Layer number in z-",
1157  nintMplofLCs_,
1158  minMplofLCs_,
1159  maxMplofLCs_,
1160  layers,
1161  0.,
1162  (float)layers));
1163 
1164  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus.push_back(
1165  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zplus",
1166  "Multiplicity vs Layer number in z+",
1167  nintMplofLCs_,
1168  minMplofLCs_,
1169  maxMplofLCs_,
1170  layers,
1171  0.,
1172  (float)layers));
1173 
1174  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy.push_back(
1175  ibook.book2D("multiplicityOfLCinMCL_vs_layerclusterenergy",
1176  "Multiplicity vs Layer cluster energy",
1177  nintMplofLCs_,
1178  minMplofLCs_,
1179  maxMplofLCs_,
1183 
1184  histograms.h_multicluster_pt.push_back(
1185  ibook.book1D("multicluster_pt", "Pt of the multicluster", nintPt_, minPt_, maxPt_));
1186  histograms.h_multicluster_eta.push_back(
1187  ibook.book1D("multicluster_eta", "Eta of the multicluster", nintEta_, minEta_, maxEta_));
1188  histograms.h_multicluster_phi.push_back(
1189  ibook.book1D("multicluster_phi", "Phi of the multicluster", nintPhi_, minPhi_, maxPhi_));
1190  histograms.h_multicluster_energy.push_back(
1191  ibook.book1D("multicluster_energy", "Energy of the multicluster", nintEne_, minEne_, maxEne_));
1192  histograms.h_multicluster_x.push_back(
1193  ibook.book1D("multicluster_x", "X position of the multicluster", nintX_, minX_, maxX_));
1194  histograms.h_multicluster_y.push_back(
1195  ibook.book1D("multicluster_y", "Y position of the multicluster", nintY_, minY_, maxY_));
1196  histograms.h_multicluster_z.push_back(
1197  ibook.book1D("multicluster_z", "Z position of the multicluster", nintZ_, minZ_, maxZ_));
1198  histograms.h_multicluster_firstlayer.push_back(
1199  ibook.book1D("multicluster_firstlayer", "First layer of the multicluster", 2 * layers, 0., (float)2 * layers));
1200  histograms.h_multicluster_lastlayer.push_back(
1201  ibook.book1D("multicluster_lastlayer", "Last layer of the multicluster", 2 * layers, 0., (float)2 * layers));
1202  histograms.h_multicluster_layersnum.push_back(ibook.book1D(
1203  "multicluster_layersnum", "Number of layers of the multicluster", 2 * layers, 0., (float)2 * layers));
1204 }
1205 
1207  //We will save some info straight from geometry to avoid mistakes from updates
1208  //----------- TODO ----------------------------------------------------------
1209  //For now the values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the ones expected.
1210  //Will come back to this when there is info in CMSSW to put in the DQM file.
1211  histograms.lastLayerEEzm->Fill(recHitTools_->lastLayerEE());
1212  histograms.lastLayerFHzm->Fill(recHitTools_->lastLayerFH());
1213  histograms.maxlayerzm->Fill(layers);
1214  histograms.lastLayerEEzp->Fill(recHitTools_->lastLayerEE() + layers);
1215  histograms.lastLayerFHzp->Fill(recHitTools_->lastLayerFH() + layers);
1216  histograms.maxlayerzp->Fill(layers + layers);
1217 }
1218 
1220  int pdgid,
1221  const CaloParticle& caloparticle,
1222  std::vector<SimVertex> const& simVertices,
1223  unsigned int layers,
1224  std::unordered_map<DetId, const HGCRecHit*> const& hitMap) const {
1225  const auto eta = getEta(caloparticle.eta());
1226  if (histograms.h_caloparticle_eta.count(pdgid)) {
1227  histograms.h_caloparticle_eta.at(pdgid)->Fill(eta);
1228  }
1229  if (histograms.h_caloparticle_eta_Zorigin.count(pdgid)) {
1230  histograms.h_caloparticle_eta_Zorigin.at(pdgid)->Fill(
1231  simVertices.at(caloparticle.g4Tracks()[0].vertIndex()).position().z(), eta);
1232  }
1233 
1234  if (histograms.h_caloparticle_energy.count(pdgid)) {
1235  histograms.h_caloparticle_energy.at(pdgid)->Fill(caloparticle.energy());
1236  }
1237  if (histograms.h_caloparticle_pt.count(pdgid)) {
1238  histograms.h_caloparticle_pt.at(pdgid)->Fill(caloparticle.pt());
1239  }
1240  if (histograms.h_caloparticle_phi.count(pdgid)) {
1241  histograms.h_caloparticle_phi.at(pdgid)->Fill(caloparticle.phi());
1242  }
1243 
1244  if (histograms.h_caloparticle_nSimClusters.count(pdgid)) {
1245  histograms.h_caloparticle_nSimClusters.at(pdgid)->Fill(caloparticle.simClusters().size());
1246 
1247  int simHits = 0;
1248  int minLayerId = 999;
1249  int maxLayerId = 0;
1250 
1251  int simHits_matched = 0;
1252  int minLayerId_matched = 999;
1253  int maxLayerId_matched = 0;
1254 
1255  float energy = 0.;
1256  std::map<int, double> totenergy_layer;
1257 
1258  for (auto const& sc : caloparticle.simClusters()) {
1259  LogDebug("HGCalValidator") << " This sim cluster has " << sc->hits_and_fractions().size() << " simHits and "
1260  << sc->energy() << " energy. " << std::endl;
1261  simHits += sc->hits_and_fractions().size();
1262  for (auto const& h_and_f : sc->hits_and_fractions()) {
1263  const auto hitDetId = h_and_f.first;
1264  int layerId =
1265  recHitTools_->getLayerWithOffset(hitDetId) + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1;
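         // e.g. for layers = 50: a hit on layer 10 of the -z endcap (zside = -1) gives
         //   10 + 50 * ((-1 + 1) >> 1) - 1 = 9, while the same layer on +z (zside = +1) gives
         //   10 + 50 * ((+1 + 1) >> 1) - 1 = 59, so the two endcaps occupy disjoint index ranges.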
1266  // matched-layer bounds; these are only updated below if a matched RecHit is found for this hit
1267  int layerId_matched_min = 999;
1268  int layerId_matched_max = 0;
1269  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitDetId);
1270  if (itcheck != hitMap.end()) {
1271  layerId_matched_min = layerId;
1272  layerId_matched_max = layerId;
1273  simHits_matched++;
1274 
1275  const HGCRecHit* hit = itcheck->second;
1276  energy += hit->energy() * h_and_f.second;
1277  histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy() * h_and_f.second);
1278  histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy() * h_and_f.second);
1279 
1280  if (totenergy_layer.find(layerId) != totenergy_layer.end()) {
1281  totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy();
1282  } else {
1283  totenergy_layer.emplace(layerId, hit->energy());
1284  }
1285  if (caloparticle.simClusters().size() == 1)
1286  histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl.at(pdgid)->Fill(layerId,
1287  hit->energy() * h_and_f.second);
1288  } else {
1289  LogDebug("HGCalValidator") << " matched to RecHit NOT found !" << std::endl;
1290  }
1291 
1292  minLayerId = std::min(minLayerId, layerId);
1293  maxLayerId = std::max(maxLayerId, layerId);
1294  minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min);
1295  maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max);
1296  }
1297  LogDebug("HGCalValidator") << std::endl;
1298  }
1299  histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId);
1300  histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId);
1301  histograms.h_caloparticle_layersnum.at(pdgid)->Fill(int(maxLayerId - minLayerId));
1302 
1303  histograms.h_caloparticle_firstlayer_matchedtoRecHit.at(pdgid)->Fill(minLayerId_matched);
1304  histograms.h_caloparticle_lastlayer_matchedtoRecHit.at(pdgid)->Fill(maxLayerId_matched);
1305  histograms.h_caloparticle_layersnum_matchedtoRecHit.at(pdgid)->Fill(int(maxLayerId_matched - minLayerId_matched));
1306 
1307  histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits);
1308  histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched);
1309  histograms.h_caloparticle_selfenergy.at(pdgid)->Fill((float)energy);
1310  histograms.h_caloparticle_energyDifference.at(pdgid)->Fill((float)1. - energy / caloparticle.energy());
1311 
1312  //Fill the cumulative energy per layer (the map is ordered in layerId, so sum_energy accumulates layer by layer)
1313  auto i = totenergy_layer.begin();
1314  double sum_energy = 0.0;
1315  while (i != totenergy_layer.end()) {
1316  sum_energy += i->second;
1317  histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloparticle.energy() * 100.);
1318  i++;
1319  }
1320  }
1321 }
1322 
1323 void HGVHistoProducerAlgo::HGVHistoProducerAlgo::fill_simcluster_histos(const Histograms& histograms,
1324  std::vector<SimCluster> const& simclusters,
1325  unsigned int layers,
1326  std::vector<int> thicknesses) const {
1327  //Each event to be treated as two events: an event in +ve endcap,
1328  //plus another event in -ve endcap. In this spirit there will be
1329  //a layer variable (layerid) that maps the layers in :
1330  //-z: 0->49
1331  //+z: 50->99
1332 
1333  //To keep track of total num of simclusters per layer
1334  //tnscpl[layerid]
1335  std::vector<int> tnscpl(1000, 0); //tnscpl.clear(); tnscpl.reserve(1000);
1336 
1337  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1338  std::map<std::string, int> tnscpthplus;
1339  tnscpthplus.clear();
1340  std::map<std::string, int> tnscpthminus;
1341  tnscpthminus.clear();
1342  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1343  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1344  tnscpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1345  tnscpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1346  }
1347  //To keep track of the total num of simclusters with mixed thickness hits per event
1348  tnscpthplus.insert(std::pair<std::string, int>("mixed", 0));
1349  tnscpthminus.insert(std::pair<std::string, int>("mixed", 0));
1350 
1351  //loop through simclusters
1352  for (unsigned int ic = 0; ic < simclusters.size(); ++ic) {
1353  const auto& sc = simclusters[ic];
1354  const auto& hitsAndFractions = sc.hits_and_fractions();
1355 
1356  //Auxiliary variables to count the number of different kinds of hits in each simcluster
1357  int nthhits120p = 0;
1358  int nthhits200p = 0;
1359  int nthhits300p = 0;
1360  int nthhitsscintp = 0;
1361  int nthhits120m = 0;
1362  int nthhits200m = 0;
1363  int nthhits300m = 0;
1364  int nthhitsscintm = 0;
1365  //For the thickness of the hits of the simcluster.
1366  double thickness = 0.;
1367  //To keep track of whether we have already counted the simcluster in a specific layer
1368  std::vector<int> occurenceSCinlayer(1000, 0); //[layerid][0 if not added]
1369 
1370  //loop through hits of the simcluster
1371  for (const auto& hAndF : hitsAndFractions) {
1372  const DetId sh_detid = hAndF.first;
1373 
1374  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
1375  int layerid =
1376  recHitTools_->getLayerWithOffset(sh_detid) + layers * ((recHitTools_->zside(sh_detid) + 1) >> 1) - 1;
1377  //zside that the current cluster belongs to.
1378  int zside = recHitTools_->zside(sh_detid);
1379 
1380  //add the simcluster to the relevant layer. A simcluster may give contribution to several layers.
1381  if (occurenceSCinlayer[layerid] == 0) {
1382  tnscpl[layerid]++;
1383  }
1384  occurenceSCinlayer[layerid]++;
1385 
1386  if (sh_detid.det() == DetId::Forward || sh_detid.det() == DetId::HGCalEE || sh_detid.det() == DetId::HGCalHSi) {
1387  thickness = recHitTools_->getSiThickness(sh_detid);
1388  } else if (sh_detid.det() == DetId::HGCalHSc) {
1389  thickness = -1;
1390  } else {
1391  LogDebug("HGCalValidator") << "These are HGCal simclusters, you shouldn't be here !!! " << layerid << "\n";
1392  continue;
1393  }
1394 
1395  if ((thickness == 120.) && (zside > 0.)) {
1396  nthhits120p++;
1397  } else if ((thickness == 120.) && (zside < 0.)) {
1398  nthhits120m++;
1399  } else if ((thickness == 200.) && (zside > 0.)) {
1400  nthhits200p++;
1401  } else if ((thickness == 200.) && (zside < 0.)) {
1402  nthhits200m++;
1403  } else if ((thickness == 300.) && (zside > 0.)) {
1404  nthhits300p++;
1405  } else if ((thickness == 300.) && (zside < 0.)) {
1406  nthhits300m++;
1407  } else if ((thickness == -1) && (zside > 0.)) {
1408  nthhitsscintp++;
1409  } else if ((thickness == -1) && (zside < 0.)) {
1410  nthhitsscintm++;
1411  } else { //assert(0);
1412  LogDebug("HGCalValidator")
1413  << " You are running a geometry that contains thicknesses different than the normal ones. "
1414  << "\n";
1415  }
1416 
1417  } //end of loop through hits
1418 
1419  //Check for simultaneously having hits of different kinds. Checking the pairwise combinations is sufficient.
1420  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
1421  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
1422  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
1423  tnscpthplus["mixed"]++;
1424  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
1425  //This is a cluster with hits of one kind
1426  tnscpthplus[std::to_string((int)thickness)]++;
1427  }
1428  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
1429  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
1430  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
1431  tnscpthminus["mixed"]++;
1432  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
1433  //This is a cluster with hits of one kind
1434  tnscpthminus[std::to_string((int)thickness)]++;
1435  }
1436 
1437  } //end of loop through simclusters of the event
1438 
1439  //Per layer : Loop 0->99
1440  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
1441  if (histograms.h_simclusternum_perlayer.count(ilayer)) {
1442  histograms.h_simclusternum_perlayer.at(ilayer)->Fill(tnscpl[ilayer]);
1443  }
1444  } //end of loop through layers
1445 
1446  //Per thickness
1447  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1448  if (histograms.h_simclusternum_perthick.count(*it)) {
1449  histograms.h_simclusternum_perthick.at(*it)->Fill(tnscpthplus[std::to_string(*it)]);
1450  histograms.h_simclusternum_perthick.at(*it)->Fill(tnscpthminus[std::to_string(*it)]);
1451  }
1452  }
1453  //Mixed thickness clusters
1454  histograms.h_mixedhitssimcluster_zplus->Fill(tnscpthplus["mixed"]);
1455  histograms.h_mixedhitssimcluster_zminus->Fill(tnscpthminus["mixed"]);
1456 }
1457 
1458 void HGVHistoProducerAlgo::HGVHistoProducerAlgo::fill_simclusterassosiation_histos(
1459  const Histograms& histograms,
1460  int count,
1463  edm::Handle<std::vector<SimCluster>> simClusterHandle,
1464  std::vector<SimCluster> const& simclusters,
1465  std::vector<size_t> const& sCIndices,
1466  const std::vector<float>& mask,
1467  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1468  unsigned int layers,
1469  const hgcal::RecoToSimCollectionWithSimClusters& scsInLayerClusterMap,
1470  const hgcal::SimToRecoCollectionWithSimClusters& lcsInSimClusterMap) const {
1471  //Each event to be treated as two events: an event in +ve endcap,
1472  //plus another event in -ve endcap. In this spirit there will be
1473  //a layer variable (layerid) that maps the layers in :
1474  //-z: 0->49
1475  //+z: 50->99
1476 
1477  //Will add some general plots on the specific mask in the future.
1478 
1479  layerClusters_to_SimClusters(histograms,
1480  count,
1481  clusterHandle,
1482  clusters,
1483  simClusterHandle,
1484  simclusters,
1485  sCIndices,
1486  mask,
1487  hitMap,
1488  layers,
1489  scsInLayerClusterMap,
1490  lcsInSimClusterMap);
1491 }
1492 
1494  int count,
1495  const reco::CaloCluster& cluster) const {
1496  const auto eta = getEta(cluster.eta());
1497  histograms.h_cluster_eta[count]->Fill(eta);
1498 }
1499 
1503  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
1504  std::vector<CaloParticle> const& cP,
1505  std::vector<size_t> const& cPIndices,
1506  std::vector<size_t> const& cPSelectedIndices,
1507  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1508  unsigned int layers,
1509  const hgcal::RecoToSimCollection& cpsInLayerClusterMap,
1510  const hgcal::SimToRecoCollection& cPOnLayerMap) const {
1511  auto nLayerClusters = clusters.size();
1512 
1513  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
1514  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToLayerClusterId_Map;
1515 
1516  // The association has to be done in an all-vs-all fashion.
1517  // For this reason we use the full set of caloParticles, with the only filter on bx
1518  for (const auto& cpId : cPIndices) {
1519  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1520  for (const auto& it_sc : simClusterRefVector) {
1521  const SimCluster& simCluster = (*(it_sc));
1522  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1523  for (const auto& it_haf : hits_and_fractions) {
1524  DetId hitid = (it_haf.first);
1525  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1526  if (itcheck != hitMap.end()) {
1527  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
1528  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
1529  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1530  detIdToCaloParticleId_Map[hitid].emplace_back(
1531  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1532  } else {
1533  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
1534  detIdToCaloParticleId_Map[hitid].end(),
1535  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1536  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
1537  findHitIt->fraction += it_haf.second;
1538  } else {
1539  detIdToCaloParticleId_Map[hitid].emplace_back(
1540  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1541  }
1542  }
1543  }
1544  }
1545  }
1546  }
1547 
1548  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1549  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1550  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1551 
1552  // This vector will store, for each hit in the Layercluster, the index of
1553  // the CaloParticle that contributed the most, in terms of energy, to it.
1554  // Special values are:
1555  //
1556  // -2 --> the reconstruction fraction of the RecHit is 0 (used in the past to monitor Halo Hits)
1557  // -3 --> same as before with the added condition that no CaloParticle has been linked to this RecHit
1558  // -1 --> the reco fraction is >0, but no CaloParticle has been linked to it
1559  // >=0 --> index of the linked CaloParticle
1560  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
1561  const auto firstHitDetId = hits_and_fractions[0].first;
1562  int lcLayerId =
1563  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1564 
1565  // This will store the fraction of the CaloParticle energy shared with the LayerCluster: e_shared/cp_energy
1566  std::unordered_map<unsigned, float> CPEnergyInLC;
1567 
1568  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
1569  DetId rh_detid = hits_and_fractions[hitId].first;
1570  auto rhFraction = hits_and_fractions[hitId].second;
1571 
1572  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1573  const HGCRecHit* hit = itcheck->second;
1574 
1575  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(rh_detid);
1576  if (hit_find_in_LC == detIdToLayerClusterId_Map.end()) {
1577  detIdToLayerClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1578  }
1579  detIdToLayerClusterId_Map[rh_detid].emplace_back(HGVHistoProducerAlgo::detIdInfoInCluster{lcId, rhFraction});
1580 
1581  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1582 
1583  // if the fraction is zero or the hit does not belong to any calo
1584  // particle, set the caloparticleId for the hit to -1; this will
1585  // contribute to the number of noise hits
1586 
1587  // MR Remove the case in which the fraction is 0, since this could be a
1588  // real hit that has been marked as halo.
1589  if (rhFraction == 0.) {
1590  hitsToCaloParticleId[hitId] = -2;
1591  }
1592  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
1593  hitsToCaloParticleId[hitId] -= 1;
1594  } else {
1595  auto maxCPEnergyInLC = 0.f;
1596  auto maxCPId = -1;
1597  for (auto& h : hit_find_in_CP->second) {
1598  CPEnergyInLC[h.clusterId] += h.fraction * hit->energy();
1599  // Keep track of which CaloParticle contributed the most, in terms
1600  // of energy, to this specific LayerCluster.
1601  if (CPEnergyInLC[h.clusterId] > maxCPEnergyInLC) {
1602  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
1603  maxCPId = h.clusterId;
1604  }
1605  }
1606  hitsToCaloParticleId[hitId] = maxCPId;
1607  }
1608  histograms.h_cellAssociation_perlayer.at(lcLayerId)->Fill(
1609  hitsToCaloParticleId[hitId] > 0. ? 0. : hitsToCaloParticleId[hitId]);
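       // Note that every matched hit (id >= 0) is filled at 0, so this histogram separates matched
       // cells from the special categories -1, -2 and -3 described above.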
1610  } // End loop over hits on a LayerCluster
1611 
1612  } // End of loop over LayerClusters
1613 
1614  // Here we fill the plots used to compute the reco-level metrics, namely the
1615  // fake rate and the merge rate. In this loop we should *not*
1616  // restrict ourselves to the selected caloParticles.
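  // In practice h_denom_* is filled for every layer cluster, h_num_* when at least one CaloParticle
  // is associated with score below ScoreCutLCtoCP_, and h_numMerge_* when more than one is, so
  // ratios of these histograms correspond to the non-fake fraction and the merge rate per layer.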
1617  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1618  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1619  const auto firstHitDetId = hits_and_fractions[0].first;
1620  const int lcLayerId =
1621  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1622  histograms.h_denom_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1623  histograms.h_denom_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1624  //
1625  const edm::Ref<reco::CaloClusterCollection> lcRef(clusterHandle, lcId);
1626  const auto& cpsIt = cpsInLayerClusterMap.find(lcRef);
1627  if (cpsIt == cpsInLayerClusterMap.end())
1628  continue;
1629 
1630  const auto& cps = cpsIt->val;
1631  if (clusters[lcId].energy() == 0. && !cps.empty()) {
1632  for (const auto& cpPair : cps) {
1633  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1634  }
1635  continue;
1636  }
1637  for (const auto& cpPair : cps) {
1638  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first.index()
1639  << "\t score \t" << cpPair.second << std::endl;
1640  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1641  auto const& cp_linked =
1642  std::find_if(std::begin(cPOnLayerMap[cpPair.first]),
1643  std::end(cPOnLayerMap[cpPair.first]),
1644  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1645  return p.first == lcRef;
1646  });
1647  if (cp_linked ==
1648  cPOnLayerMap[cpPair.first].end()) // This should never happen by construction of the association maps
1649  continue;
1650  histograms.h_sharedenergy_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1651  cp_linked->second.first / clusters[lcId].energy(), clusters[lcId].energy());
1652  histograms.h_energy_vs_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1653  cpPair.second, cp_linked->second.first / clusters[lcId].energy());
1654  }
1655  const auto assoc =
1656  std::count_if(std::begin(cps), std::end(cps), [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
1657  if (assoc) {
1658  histograms.h_num_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1659  histograms.h_num_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1660  if (assoc > 1) {
1661  histograms.h_numMerge_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1662  histograms.h_numMerge_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1663  }
1664  const auto& best = std::min_element(
1665  std::begin(cps), std::end(cps), [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1666  const auto& best_cp_linked =
1667  std::find_if(std::begin(cPOnLayerMap[best->first]),
1668  std::end(cPOnLayerMap[best->first]),
1669  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1670  return p.first == lcRef;
1671  });
1672  if (best_cp_linked ==
1673  cPOnLayerMap[best->first].end()) // This should never happen by construction of the association maps
1674  continue;
1675  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer.at(lcLayerId)->Fill(
1676  clusters[lcId].eta(), best_cp_linked->second.first / clusters[lcId].energy());
1677  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer.at(lcLayerId)->Fill(
1678  clusters[lcId].phi(), best_cp_linked->second.first / clusters[lcId].energy());
1679  }
1680  } // End of loop over LayerClusters
1681 
1682  // Here we fill the plots used to compute the gen-level metrics, namely the
1683  // efficiency and the duplicate rate. In this loop we should restrict
1684  // ourselves to the selected caloParticles.
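  // In practice h_denom_caloparticle_* is filled for every selected CaloParticle with energy on a
  // given layer, h_num_caloparticle_* when at least one layer cluster on that layer is associated
  // with score below ScoreCutCPtoLC_, and h_numDup_caloparticle_* when more than one is, so their
  // ratios correspond to the per-layer efficiency and duplicate rate.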
1685  for (const auto& cpId : cPSelectedIndices) {
1686  const edm::Ref<CaloParticleCollection> cpRef(caloParticleHandle, cpId);
1687  const auto& lcsIt = cPOnLayerMap.find(cpRef);
1688 
1689  std::map<unsigned int, float> cPEnergyOnLayer;
1690  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId)
1691  cPEnergyOnLayer[layerId] = 0;
1692 
1693  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1694  for (const auto& it_sc : simClusterRefVector) {
1695  const SimCluster& simCluster = (*(it_sc));
1696  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1697  for (const auto& it_haf : hits_and_fractions) {
1698  const DetId hitid = (it_haf.first);
1699  const int cpLayerId =
1700  recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1701  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1702  if (itcheck != hitMap.end()) {
1703  const HGCRecHit* hit = itcheck->second;
1704  cPEnergyOnLayer[cpLayerId] += it_haf.second * hit->energy();
1705  }
1706  }
1707  }
1708 
1709  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1710  if (!cPEnergyOnLayer[layerId])
1711  continue;
1712 
1713  histograms.h_denom_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1714  histograms.h_denom_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1715 
1716  if (lcsIt == cPOnLayerMap.end())
1717  continue;
1718  const auto& lcs = lcsIt->val;
1719 
1720  auto getLCLayerId = [&](const unsigned int lcId) {
1721  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1722  const auto firstHitDetId = hits_and_fractions[0].first;
1723  const unsigned int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1724  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1725  return lcLayerId;
1726  };
1727 
1728  for (const auto& lcPair : lcs) {
1729  if (getLCLayerId(lcPair.first.index()) != layerId)
1730  continue;
1731  histograms.h_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second);
1732  histograms.h_sharedenergy_caloparticle2layercl_perlayer.at(layerId)->Fill(
1733  lcPair.second.first / cPEnergyOnLayer[layerId], cPEnergyOnLayer[layerId]);
1734  histograms.h_energy_vs_score_caloparticle2layercl_perlayer.at(layerId)->Fill(
1735  lcPair.second.second, lcPair.second.first / cPEnergyOnLayer[layerId]);
1736  }
1737  const auto assoc = std::count_if(std::begin(lcs), std::end(lcs), [&](const auto& obj) {
1738  if (getLCLayerId(obj.first.index()) != layerId)
1739  return false;
1740  else
1741  return obj.second.second < ScoreCutCPtoLC_;
1742  });
1743  if (assoc) {
1744  histograms.h_num_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1745  histograms.h_num_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1746  if (assoc > 1) {
1747  histograms.h_numDup_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1748  histograms.h_numDup_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1749  }
1750  const auto best = std::min_element(std::begin(lcs), std::end(lcs), [&](const auto& obj1, const auto& obj2) {
1751  if (getLCLayerId(obj1.first.index()) != layerId)
1752  return false;
1753  else if (getLCLayerId(obj2.first.index()) == layerId)
1754  return obj1.second.second < obj2.second.second;
1755  else
1756  return true;
1757  });
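        // best now points to the lowest-score layer cluster among those on this layer; the comparator
        // above pushes clusters on other layers towards the end of the ordering.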
1758  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer.at(layerId)->Fill(
1759  cP[cpId].g4Tracks()[0].momentum().eta(), best->second.first / cPEnergyOnLayer[layerId]);
1760  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer.at(layerId)->Fill(
1761  cP[cpId].g4Tracks()[0].momentum().phi(), best->second.first / cPEnergyOnLayer[layerId]);
1762  }
1763  }
1764  }
1765 }
1766 
1768  const Histograms& histograms,
1769  int count,
1772  edm::Handle<std::vector<SimCluster>> simClusterHandle,
1773  std::vector<SimCluster> const& sC,
1774  std::vector<size_t> const& sCIndices,
1775  const std::vector<float>& mask,
1776  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1777  unsigned int layers,
1778  const hgcal::RecoToSimCollectionWithSimClusters& scsInLayerClusterMap,
1779  const hgcal::SimToRecoCollectionWithSimClusters& lcsInSimClusterMap) const {
1780  auto nLayerClusters = clusters.size();
1781 
1782  // Here we fill the plots used to compute the reco-level metrics, namely the
1783  // fake rate and the merge rate. In this loop we should *not*
1784  // restrict ourselves to the selected simClusters.
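  // The logic below mirrors the LayerCluster-to-CaloParticle association above, with ScoreCutLCtoSC_
  // and (further down) ScoreCutSCtoLC_ as the association cuts and the SimCluster maps as input.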
1785  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1786  if (mask[lcId] != 0.) {
1787  LogDebug("HGCalValidator") << "Skipping layer cluster " << lcId << " not belonging to mask" << std::endl;
1788  continue;
1789  }
1790  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1791  const auto firstHitDetId = hits_and_fractions[0].first;
1792  const int lcLayerId =
1793  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1794  //Although the ones below are already created in the LC-to-CP association, we
1795  //recreate them here since the post processor looks for them in a specific directory.
1796  histograms.h_denom_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1797  histograms.h_denom_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1798  //
1799  const edm::Ref<reco::CaloClusterCollection> lcRef(clusterHandle, lcId);
1800  const auto& scsIt = scsInLayerClusterMap.find(lcRef);
1801  if (scsIt == scsInLayerClusterMap.end())
1802  continue;
1803 
1804  const auto& scs = scsIt->val;
1805  // If a reconstructed LayerCluster has energy 0 but is linked to at least one
1806  // SimCluster, then its score should be 1, as set in the associator
1807  if (clusters[lcId].energy() == 0. && !scs.empty()) {
1808  for (const auto& scPair : scs) {
1809  histograms.h_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(scPair.second);
1810  }
1811  continue;
1812  }
1813  //Loop through all simClusters linked to the layer cluster under study
1814  for (const auto& scPair : scs) {
1815  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t SC id: \t" << scPair.first.index()
1816  << "\t score \t" << scPair.second << std::endl;
1817  //This is filled (#layer clusters in the layer) x (#linked SimClusters) times
1818  histograms.h_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(scPair.second);
1819  auto const& sc_linked =
1820  std::find_if(std::begin(lcsInSimClusterMap[scPair.first]),
1821  std::end(lcsInSimClusterMap[scPair.first]),
1822  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1823  return p.first == lcRef;
1824  });
1825  if (sc_linked ==
1826  lcsInSimClusterMap[scPair.first].end()) // This should never happen by construction of the association maps
1827  continue;
1828  histograms.h_sharedenergy_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(
1829  sc_linked->second.first / clusters[lcId].energy(), clusters[lcId].energy());
1830  histograms.h_energy_vs_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(
1831  scPair.second, sc_linked->second.first / clusters[lcId].energy());
1832  }
1833  //Here we count how many of the simclusters linked to the layer cluster under study have a score below the cut.
1834  const auto assoc =
1835  std::count_if(std::begin(scs), std::end(scs), [](const auto& obj) { return obj.second < ScoreCutLCtoSC_; });
1836  if (assoc) {
1837  histograms.h_num_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1838  histograms.h_num_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1839  if (assoc > 1) {
1840  histograms.h_numMerge_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1841  histograms.h_numMerge_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1842  }
1843  const auto& best = std::min_element(
1844  std::begin(scs), std::end(scs), [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1845  //From all linked simclusters we find the one with the best (lowest) score and take its scId
1846  const auto& best_sc_linked =
1847  std::find_if(std::begin(lcsInSimClusterMap[best->first]),
1848  std::end(lcsInSimClusterMap[best->first]),
1849  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1850  return p.first == lcRef;
1851  });
1852  if (best_sc_linked ==
1853  lcsInSimClusterMap[best->first].end()) // This should never happen by construction of the association maps
1854  continue;
1855  histograms.h_sharedenergy_layercl2simcluster_vs_eta_perlayer[count].at(lcLayerId)->Fill(
1856  clusters[lcId].eta(), best_sc_linked->second.first / clusters[lcId].energy());
1857  histograms.h_sharedenergy_layercl2simcluster_vs_phi_perlayer[count].at(lcLayerId)->Fill(
1858  clusters[lcId].phi(), best_sc_linked->second.first / clusters[lcId].energy());
1859  }
1860  } // End of loop over LayerClusters
1861 
1862  // Here we fill the plots used to compute the gen-level metrics, namely the
1863  // efficiency and the duplicate rate. In this loop we should restrict
1864  // ourselves to the selected simClusters.
1865  for (const auto& scId : sCIndices) {
1866  const edm::Ref<SimClusterCollection> scRef(simClusterHandle, scId);
1867  const auto& lcsIt = lcsInSimClusterMap.find(scRef);
1868 
1869  std::map<unsigned int, float> sCEnergyOnLayer;
1870  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId)
1871  sCEnergyOnLayer[layerId] = 0;
1872 
1873  const auto& hits_and_fractions = sC[scId].hits_and_fractions();
1874  for (const auto& it_haf : hits_and_fractions) {
1875  const DetId hitid = (it_haf.first);
1876  const int scLayerId =
1877  recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1878  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1879  if (itcheck != hitMap.end()) {
1880  const HGCRecHit* hit = itcheck->second;
1881  sCEnergyOnLayer[scLayerId] += it_haf.second * hit->energy();
1882  }
1883  }
1884 
1885  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1886  if (!sCEnergyOnLayer[layerId])
1887  continue;
1888 
1889  histograms.h_denom_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1890  histograms.h_denom_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1891 
1892  if (lcsIt == lcsInSimClusterMap.end())
1893  continue;
1894  const auto& lcs = lcsIt->val;
1895 
1896  auto getLCLayerId = [&](const unsigned int lcId) {
1897  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1898  const auto firstHitDetId = hits_and_fractions[0].first;
1899  const unsigned int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1900  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1901  return lcLayerId;
1902  };
1903 
1904  //Loop through layer clusters linked to the simcluster under study
1905  for (const auto& lcPair : lcs) {
1906  auto lcId = lcPair.first.index();
1907  if (mask[lcId] != 0.) {
1908  LogDebug("HGCalValidator") << "Skipping layer cluster " << lcId << " not belonging to mask" << std::endl;
1909  continue;
1910  }
1911 
1912  if (getLCLayerId(lcId) != layerId)
1913  continue;
1914  histograms.h_score_simcluster2layercl_perlayer[count].at(layerId)->Fill(lcPair.second.second);
1915  histograms.h_sharedenergy_simcluster2layercl_perlayer[count].at(layerId)->Fill(
1916  lcPair.second.first / sCEnergyOnLayer[layerId], sCEnergyOnLayer[layerId]);
1917  histograms.h_energy_vs_score_simcluster2layercl_perlayer[count].at(layerId)->Fill(
1918  lcPair.second.second, lcPair.second.first / sCEnergyOnLayer[layerId]);
1919  }
1920  const auto assoc = std::count_if(std::begin(lcs), std::end(lcs), [&](const auto& obj) {
1921  if (getLCLayerId(obj.first.index()) != layerId)
1922  return false;
1923  else
1924  return obj.second.second < ScoreCutSCtoLC_;
1925  });
1926  if (assoc) {
1927  histograms.h_num_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1928  histograms.h_num_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1929  if (assoc > 1) {
1930  histograms.h_numDup_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1931  histograms.h_numDup_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1932  }
1933  const auto best = std::min_element(std::begin(lcs), std::end(lcs), [&](const auto& obj1, const auto& obj2) {
1934  if (getLCLayerId(obj1.first.index()) != layerId)
1935  return false;
1936  else if (getLCLayerId(obj2.first.index()) == layerId)
1937  return obj1.second.second < obj2.second.second;
1938  else
1939  return true;
1940  });
1941  histograms.h_sharedenergy_simcluster2layercl_vs_eta_perlayer[count].at(layerId)->Fill(
1942  sC[scId].eta(), best->second.first / sCEnergyOnLayer[layerId]);
1943  histograms.h_sharedenergy_simcluster2layercl_vs_phi_perlayer[count].at(layerId)->Fill(
1944  sC[scId].phi(), best->second.first / sCEnergyOnLayer[layerId]);
1945  }
1946  }
1947  }
1948 }
1949 
1951  int count,
1954  const Density& densities,
1955  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
1956  std::vector<CaloParticle> const& cP,
1957  std::vector<size_t> const& cPIndices,
1958  std::vector<size_t> const& cPSelectedIndices,
1959  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1960  std::map<double, double> cummatbudg,
1961  unsigned int layers,
1962  std::vector<int> thicknesses,
1963  const hgcal::RecoToSimCollection& cpsInLayerClusterMap,
1964  const hgcal::SimToRecoCollection& cPOnLayerMap) const {
1965  //Each event to be treated as two events: an event in +ve endcap,
1966  //plus another event in -ve endcap. In this spirit there will be
1967  //a layer variable (layerid) that maps the layers in :
1968  //-z: 0->51
1969  //+z: 52->103
1970 
1971  //To keep track of total num of layer clusters per layer
1972  //tnlcpl[layerid]
1973  std::vector<int> tnlcpl(1000, 0); //tnlcpl.clear(); tnlcpl.reserve(1000);
1974 
1975  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1976  std::map<std::string, int> tnlcpthplus;
1977  tnlcpthplus.clear();
1978  std::map<std::string, int> tnlcpthminus;
1979  tnlcpthminus.clear();
1980  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1981  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1982  tnlcpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1983  tnlcpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1984  }
1985  //To keep track of the total num of clusters with mixed thickness hits per event
1986  tnlcpthplus.insert(std::pair<std::string, int>("mixed", 0));
1987  tnlcpthminus.insert(std::pair<std::string, int>("mixed", 0));
1988 
1990  clusterHandle,
1991  clusters,
1992  caloParticleHandle,
1993  cP,
1994  cPIndices,
1995  cPSelectedIndices,
1996  hitMap,
1997  layers,
1998  cpsInLayerClusterMap,
1999  cPOnLayerMap);
2000 
2001  //To find out the total amount of energy clustered per layer
2002  //Initialize with zeros because clear() was seen to give weird numbers.
2003  std::vector<double> tecpl(1000, 0.0); //tecpl.clear(); tecpl.reserve(1000);
2004  //for the longitudinal depth barycenter
2005  std::vector<double> ldbar(1000, 0.0); //ldbar.clear(); ldbar.reserve(1000);
2006 
2007  //We need to compare with the total amount of energy coming from caloparticles
2008  double caloparteneplus = 0.;
2009  double caloparteneminus = 0.;
2010  for (const auto& cpId : cPIndices) {
2011  if (cP[cpId].eta() >= 0.) {
2012  caloparteneplus = caloparteneplus + cP[cpId].energy();
2013  }
2014  if (cP[cpId].eta() < 0.) {
2015  caloparteneminus = caloparteneminus + cP[cpId].energy();
2016  }
2017  }
2018 
2019  //loop through clusters of the event
2020  for (unsigned int layerclusterIndex = 0; layerclusterIndex < clusters.size(); layerclusterIndex++) {
2021  const std::vector<std::pair<DetId, float>> hits_and_fractions = clusters[layerclusterIndex].hitsAndFractions();
2022 
2023  const DetId seedid = clusters[layerclusterIndex].seed();
2024  const double seedx = recHitTools_->getPosition(seedid).x();
2025  const double seedy = recHitTools_->getPosition(seedid).y();
2026  DetId maxid = findmaxhit(clusters[layerclusterIndex], hitMap);
2027 
2028  // const DetId maxid = clusters[layerclusterIndex].max();
2029  double maxx = recHitTools_->getPosition(maxid).x();
2030  double maxy = recHitTools_->getPosition(maxid).y();
2031 
2032  //Auxiliary variables to count the number of different kinds of hits in each cluster
2033  int nthhits120p = 0;
2034  int nthhits200p = 0;
2035  int nthhits300p = 0;
2036  int nthhitsscintp = 0;
2037  int nthhits120m = 0;
2038  int nthhits200m = 0;
2039  int nthhits300m = 0;
2040  int nthhitsscintm = 0;
2041  //For the thickness of the hits of the layer cluster.
2042  double thickness = 0.;
2043  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
2044  int layerid = 0;
2045  //We will need another layer variable for the longitudinal material budget file reading.
2046  //In this case we need no distinction between -z and +z.
2047  int lay = 0;
2048  //We will need here to save the combination thick_lay
2049  std::string istr = "";
2050  //boolean to check for the layer that the cluster belongs to. Maybe later we will check all the layer hits.
2051  bool cluslay = true;
2052  //zside that the current cluster belongs to.
2053  int zside = 0;
2054 
2055  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
2056  it_haf != hits_and_fractions.end();
2057  ++it_haf) {
2058  const DetId rh_detid = it_haf->first;
2059  //The layer that the current hit belongs to
2060  layerid = recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
2061  lay = recHitTools_->getLayerWithOffset(rh_detid);
2062  zside = recHitTools_->zside(rh_detid);
2063  if (rh_detid.det() == DetId::Forward || rh_detid.det() == DetId::HGCalEE || rh_detid.det() == DetId::HGCalHSi) {
2064  thickness = recHitTools_->getSiThickness(rh_detid);
2065  } else if (rh_detid.det() == DetId::HGCalHSc) {
2066  thickness = -1;
2067  } else {
2068  LogDebug("HGCalValidator") << "These are HGCal layer clusters, you shouldn't be here !!! " << layerid << "\n";
2069  continue;
2070  }
2071 
2072  //Count here only once the layer cluster and save the combination thick_layerid
2073  std::string curistr = std::to_string((int)thickness);
2074  std::string lay_string = std::to_string(layerid);
2075  while (lay_string.size() < 2)
2076  lay_string.insert(0, "0");
2077  curistr += "_" + lay_string;
2078  if (cluslay) {
2079  tnlcpl[layerid]++;
2080  istr = curistr;
2081  cluslay = false;
2082  }
2083 
2084  if ((thickness == 120.) && (recHitTools_->zside(rh_detid) > 0.)) {
2085  nthhits120p++;
2086  } else if ((thickness == 120.) && (recHitTools_->zside(rh_detid) < 0.)) {
2087  nthhits120m++;
2088  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) > 0.)) {
2089  nthhits200p++;
2090  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) < 0.)) {
2091  nthhits200m++;
2092  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) > 0.)) {
2093  nthhits300p++;
2094  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) < 0.)) {
2095  nthhits300m++;
2096  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) > 0.)) {
2097  nthhitsscintp++;
2098  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) < 0.)) {
2099  nthhitsscintm++;
2100  } else { //assert(0);
2101  LogDebug("HGCalValidator")
2102  << " You are running a geometry that contains thicknesses different than the normal ones. "
2103  << "\n";
2104  }
2105 
2106  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2107  if (itcheck == hitMap.end()) {
2108  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a hit " << rh_detid.rawId() << " "
2109  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
2110  continue;
2111  }
2112 
2113  const HGCRecHit* hit = itcheck->second;
2114 
2115  //Here for the per cell plots
2116  //----
2117  const double hit_x = recHitTools_->getPosition(rh_detid).x();
2118  const double hit_y = recHitTools_->getPosition(rh_detid).y();
2119  double distancetoseed = distance(seedx, seedy, hit_x, hit_y);
2120  double distancetomax = distance(maxx, maxy, hit_x, hit_y);
2121  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer.count(curistr)) {
2122  histograms.h_distancetoseedcell_perthickperlayer.at(curistr)->Fill(distancetoseed);
2123  }
2124  //----
2125  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer_eneweighted.count(curistr)) {
2126  histograms.h_distancetoseedcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetoseed, hit->energy());
2127  }
2128  //----
2129  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer.count(curistr)) {
2130  histograms.h_distancetomaxcell_perthickperlayer.at(curistr)->Fill(distancetomax);
2131  }
2132  //----
2133  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer_eneweighted.count(curistr)) {
2134  histograms.h_distancetomaxcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetomax, hit->energy());
2135  }
2136 
2137  //Let's check the density
2138  std::map<DetId, float>::const_iterator dit = densities.find(rh_detid);
2139  if (dit == densities.end()) {
2140  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a density " << rh_detid.rawId() << " "
2141  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
2142  continue;
2143  }
2144 
2145  if (histograms.h_cellsenedens_perthick.count((int)thickness)) {
2146  histograms.h_cellsenedens_perthick.at((int)thickness)->Fill(dit->second);
2147  }
2148 
2149  } // end of loop through hits and fractions
2150 
2151  //Check for simultaneously having hits of different kinds. Checking the pairwise combinations is sufficient.
2152  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
2153  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
2154  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
2155  tnlcpthplus["mixed"]++;
2156  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
2157  //This is a cluster with hits of one kind
2158  tnlcpthplus[std::to_string((int)thickness)]++;
2159  }
2160  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
2161  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
2162  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
2163  tnlcpthminus["mixed"]++;
2164  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
2165  //This is a cluster with hits of one kind
2166  tnlcpthminus[std::to_string((int)thickness)]++;
2167  }
2168 
2169  //To find the thickness with the largest number of cells
2170  std::vector<int> bigamoth;
2171  bigamoth.clear();
2172  if (zside > 0) {
2173  bigamoth.push_back(nthhits120p);
2174  bigamoth.push_back(nthhits200p);
2175  bigamoth.push_back(nthhits300p);
2176  bigamoth.push_back(nthhitsscintp);
2177  }
2178  if (zside < 0) {
2179  bigamoth.push_back(nthhits120m);
2180  bigamoth.push_back(nthhits200m);
2181  bigamoth.push_back(nthhits300m);
2182  bigamoth.push_back(nthhitsscintm);
2183  }
2184  auto bgth = std::max_element(bigamoth.begin(), bigamoth.end());
2185  istr = std::to_string(thicknesses[std::distance(bigamoth.begin(), bgth)]);
2186  std::string lay_string = std::to_string(layerid);
2187  while (lay_string.size() < 2)
2188  lay_string.insert(0, "0");
2189  istr += "_" + lay_string;
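      // istr now encodes the dominant thickness and the zero-padded layer id (e.g. "200_37") and is
      // used below as the key for the per-thickness, per-layer histograms.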
2190 
2191  //Here for the per cluster plots that need the thickness_layer info
2192  if (histograms.h_cellsnum_perthickperlayer.count(istr)) {
2193  histograms.h_cellsnum_perthickperlayer.at(istr)->Fill(hits_and_fractions.size());
2194  }
2195 
2196  //Now, with the distance between seed and max cell.
2197  double distancebetseedandmax = distance(seedx, seedy, maxx, maxy);
2198  //The thickness_layer combination in this case will use the thickness of the seed as a convention.
2199  std::string seedstr = std::to_string((int)recHitTools_->getSiThickness(seedid)) + "_" + std::to_string(layerid);
2200  seedstr += "_" + lay_string;
2201  if (histograms.h_distancebetseedandmaxcell_perthickperlayer.count(seedstr)) {
2202  histograms.h_distancebetseedandmaxcell_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax);
2203  }
2204  if (histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.count(seedstr)) {
2205  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.at(seedstr)->Fill(
2206  distancebetseedandmax, clusters[layerclusterIndex].energy());
2207  }
2208 
2209  //Energy clustered per layer
2210  tecpl[layerid] = tecpl[layerid] + clusters[layerclusterIndex].energy();
2211  ldbar[layerid] = ldbar[layerid] + clusters[layerclusterIndex].energy() * cummatbudg[(double)lay];
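      // ldbar accumulates the cluster energy weighted by the cumulative material budget of its layer;
      // at the end of the event the sum of ldbar divided by the total clustered energy gives the
      // longitudinal depth barycentre filled further below.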
2212 
2213  } //end of loop through clusters of the event
2214 
2215  //After the end of the event we can now fill with the results.
2216  //First a couple of variables to keep the sum of the energy of all clusters
2217  double sumeneallcluspl = 0.;
2218  double sumeneallclusmi = 0.;
2219  //And the longitudinal variable
2220  double sumldbarpl = 0.;
2221  double sumldbarmi = 0.;
2222  //Per layer : Loop 0->103
2223  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2224  if (histograms.h_clusternum_perlayer.count(ilayer)) {
2225  histograms.h_clusternum_perlayer.at(ilayer)->Fill(tnlcpl[ilayer]);
2226  }
2227  // The 2 * layers range covers both endcaps: one half for minus and one for plus
2228  //First the -z endcap
2229  if (ilayer < layers) {
2230  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
2231  if (caloparteneminus != 0.) {
2232  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneminus);
2233  }
2234  }
2235  //Keep here the total energy for the event in -z
2236  sumeneallclusmi = sumeneallclusmi + tecpl[ilayer];
2237  //And for the longitudinal variable
2238  sumldbarmi = sumldbarmi + ldbar[ilayer];
2239  } else { //Then for the +z
2240  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
2241  if (caloparteneplus != 0.) {
2242  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneplus);
2243  }
2244  }
2245  //Keep here the total energy for the event in +z
2246  sumeneallcluspl = sumeneallcluspl + tecpl[ilayer];
2247  //And for the longitudinal variable
2248  sumldbarpl = sumldbarpl + ldbar[ilayer];
2249  } //end of +z loop
2250 
2251  } //end of loop over layers
2252 
2253  //Per thickness
2254  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
2255  if (histograms.h_clusternum_perthick.count(*it)) {
2256  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthplus[std::to_string(*it)]);
2257  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthminus[std::to_string(*it)]);
2258  }
2259  }
2260  //Mixed thickness clusters
2261  histograms.h_mixedhitscluster_zplus[count]->Fill(tnlcpthplus["mixed"]);
2262  histograms.h_mixedhitscluster_zminus[count]->Fill(tnlcpthminus["mixed"]);
2263 
2264  //Total energy clustered from all layer clusters (fraction)
2265  if (caloparteneplus != 0.) {
2266  histograms.h_energyclustered_zplus[count]->Fill(100. * sumeneallcluspl / caloparteneplus);
2267  }
2268  if (caloparteneminus != 0.) {
2269  histograms.h_energyclustered_zminus[count]->Fill(100. * sumeneallclusmi / caloparteneminus);
2270  }
2271 
2272  //For the longitudinal depth barycenter
2273  histograms.h_longdepthbarycentre_zplus[count]->Fill(sumldbarpl / sumeneallcluspl);
2274  histograms.h_longdepthbarycentre_zminus[count]->Fill(sumldbarmi / sumeneallclusmi);
2275 }
2276 
2278  int count,
2279  const std::vector<reco::HGCalMultiCluster>& multiClusters,
2280  std::vector<CaloParticle> const& cP,
2281  std::vector<size_t> const& cPIndices,
2282  std::vector<size_t> const& cPSelectedIndices,
2283  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
2284  unsigned int layers) const {
2285  auto nMultiClusters = multiClusters.size();
2286  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
2287  auto nCaloParticles = cPIndices.size();
2288 
2289  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
2290  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>> detIdToMultiClusterId_Map;
2291  std::vector<int> tracksters_fakemerge(nMultiClusters, 0);
2292  std::vector<int> tracksters_duplicate(nMultiClusters, 0);
2293 
2294  // this contains the ids of the caloparticles contributing with at least one hit to the multi cluster and the reconstruction error
2295  //cpsInMultiCluster[multicluster][CPids]
2296  //Connects a multicluster with all related caloparticles.
2297  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInMultiCluster;
2298  cpsInMultiCluster.resize(nMultiClusters);
2299 
2300  //cPOnLayer[caloparticle][layer]
2301  //This defines a "calo particle on layer" concept. It is only filled in case
2302  //that calo particle has a reconstructed hit related via detid. So, a cPOnLayer[i][j] connects a
2303  //specific calo particle i in layer j with:
2304  //1. the sum of all rechits energy times fraction of the relevant simhit in layer j related to that calo particle i.
2305  //2. the hits and fractions of that calo particle i in layer j.
2306  //3. the layer clusters with matched rechit id.
2307  std::unordered_map<int, std::vector<caloParticleOnLayer>> cPOnLayer;
2308  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2309  auto cpIndex = cPIndices[i];
2310  cPOnLayer[cpIndex].resize(layers * 2);
2311  for (unsigned int j = 0; j < layers * 2; ++j) {
2312  cPOnLayer[cpIndex][j].caloParticleId = cpIndex;
2313  cPOnLayer[cpIndex][j].energy = 0.f;
2314  cPOnLayer[cpIndex][j].hits_and_fractions.clear();
2315  }
2316  }
2317 
2318  for (const auto& cpId : cPIndices) {
2319  //take sim clusters
2320  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
2321  //loop through sim clusters
2322  for (const auto& it_sc : simClusterRefVector) {
2323  const SimCluster& simCluster = (*(it_sc));
2324  const auto& hits_and_fractions = simCluster.hits_and_fractions();
2325  for (const auto& it_haf : hits_and_fractions) {
2326  DetId hitid = (it_haf.first);
2327  //V9:maps the layers in -z: 0->51 and in +z: 52->103
2328  //V10:maps the layers in -z: 0->49 and in +z: 50->99
2329  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
2330  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
2331  //Checks whether the current hit belonging to sim cluster has a reconstructed hit.
2332  if (itcheck != hitMap.end()) {
2333  const HGCRecHit* hit = itcheck->second;
2334  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
2335  //make a map that will connect a detid with:
2336  //1. the caloparticles that have a simcluster with sim hits in that cell via caloparticle id.
2337  //2. the sum of all simhits fractions that contributes to that detid.
2338  //So, keep in mind that in case of multiple caloparticles contributing in the same cell
2339  //the fraction is the sum over all calo particles. So, something like:
2340  //detid: (caloparticle 1, sum of hits fractions in that detid over all cp) , (caloparticle 2, sum of hits fractions in that detid over all cp), (caloparticle 3, sum of hits fractions in that detid over all cp) ...
2341  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
2342  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
2343  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
2344  detIdToCaloParticleId_Map[hitid].emplace_back(
2345  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2346  } else {
2347  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
2348  detIdToCaloParticleId_Map[hitid].end(),
2349  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2350  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
2351  findHitIt->fraction += it_haf.second;
2352  } else {
2353  detIdToCaloParticleId_Map[hitid].emplace_back(
2354  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2355  }
2356  }
2357  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
2358  //fill the cPOnLayer[caloparticle][layer] object with energy (sum of all rechits energy times fraction
2359  //of the relevant simhit) and keep the hit (detid and fraction) that contributed.
2360  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
2361  // We need to compress the hits and fractions in order to have a
2362  // reasonable score between CP and LC. Imagine, for example, that a
2363  // CP has detID X used by 2 SimClusters with different fractions. If
2364  // a single LC uses X with fraction 1 and is compared to the 2
2365  // contributions separately, it will be assigned a score != 0, which
2366  // is wrong.
2367  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
2368  auto found = std::find_if(
2369  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
2370  if (found != haf.end()) {
2371  found->second += it_haf.second;
2372  } else {
2373  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
2374  }
2375  }
2376  } // end of loop through simhits
2377  } // end of loop through simclusters
2378  } // end of loop through caloparticles
2379 
2380  //Loop through multiclusters
2381  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2382  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
2383  if (!hits_and_fractions.empty()) {
2384  std::unordered_map<unsigned, float> CPEnergyInMCL;
2385  int maxCPId_byNumberOfHits = -1;
2386  unsigned int maxCPNumberOfHitsInMCL = 0;
2387  int maxCPId_byEnergy = -1;
2388  float maxEnergySharedMCLandCP = 0.f;
2389  float energyFractionOfMCLinCP = 0.f;
2390  float energyFractionOfCPinMCL = 0.f;
2391 
2392  //In case of matched rechit-simhit, so matched
2393  //caloparticle-layercluster-multicluster, we count and save the number of
2394  //rechits related to the maximum energy CaloParticle out of all
2395  //CaloParticles related to that layer cluster and multicluster.
2396 
2397  std::unordered_map<unsigned, unsigned> occurrencesCPinMCL;
2398  unsigned int numberOfNoiseHitsInMCL = 0;
2399  unsigned int numberOfHaloHitsInMCL = 0;
2400  unsigned int numberOfHitsInMCL = 0;
2401 
2402  //number of hits related to that cluster.
2403  unsigned int numberOfHitsInLC = hits_and_fractions.size();
2404  numberOfHitsInMCL += numberOfHitsInLC;
2405  std::unordered_map<unsigned, float> CPEnergyInLC;
2406 
2407  //hitsToCaloParticleId is a vector of ints, one for each rechit of the
2408  //layer cluster under study. If negative, the rechit has no related simhit from any CaloParticle.
2409  //If zero or positive, at least one CaloParticle has been found with a matched simhit.
2410  //In more detail:
2411  // 1. hitsToCaloParticleId[hitId] = -3
2412  // TN: These represent Halo Cells(N) that have not been
2413  // assigned to any CaloParticle (hence the T).
2414  // 2. hitsToCaloParticleId[hitId] = -2
2415  // FN: These represent Halo Cells(N) that have been assigned
2416  // to a CaloParticle (hence the F, since those should have not been marked as halo)
2417  // 3. hitsToCaloParticleId[hitId] = -1
2418  // FP: These represent Real Cells(P) that have not been
2419  // assigned to any CaloParticle (hence the F, since these are fakes)
2420  // 4. hitsToCaloParticleId[hitId] >= 0
2421  // TP: These represent Real Cells(P) that have been assigned
2422  // to a CaloParticle (hence the T)
2423 
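  // For illustration only (hypothetical values, following the encoding documented
  // above): hitsToCaloParticleId = { 7, -1, -2, -3 } would mean that hit 0 is
  // matched to CaloParticle 7 (TP), hit 1 is a real but unmatched cell (FP),
  // hit 2 is a halo cell that nevertheless has a CaloParticle (FN), and hit 3 is
  // a halo cell with no CaloParticle at all (TN).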
2424  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
2425  //det id of the first hit just to make the lcLayerId variable
2426  //which maps the layers in -z: 0->51 and in +z: 52->103
2427  const auto firstHitDetId = hits_and_fractions[0].first;
2428  int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
2429  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2430 
2431  //Loop though the hits of the layer cluster under study
2432  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
2433  DetId rh_detid = hits_and_fractions[hitId].first;
2434  auto rhFraction = hits_and_fractions[hitId].second;
2435 
2436  //Since the hit belongs to the layer cluster, it must also be in the rechits map.
2437  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2438  const HGCRecHit* hit = itcheck->second;
2439 
2440  //Make a map that will connect a detid (that belongs to a rechit of the layer cluster under study,
2441  //no need to save others) with:
2442  //1. the layer clusters that have rechits in that detid
2443  //2. the fraction of the rechit of each layer cluster that contributes to that detid.
2444  //So, something like:
2445  //detid: (layer cluster 1, hit fraction) , (layer cluster 2, hit fraction), (layer cluster 3, hit fraction) ...
2446  //Compare this with the CaloParticle map (detIdToCaloParticleId_Map) built above.
2447  auto hit_find_in_LC = detIdToMultiClusterId_Map.find(rh_detid);
2448  if (hit_find_in_LC == detIdToMultiClusterId_Map.end()) {
2449  detIdToMultiClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>();
2450  }
2451  detIdToMultiClusterId_Map[rh_detid].emplace_back(
2452  HGVHistoProducerAlgo::detIdInfoInMultiCluster{mclId, mclId, rhFraction});
2453 
2454  //Check whether the rechit of the layer cluster under study has a sim hit in the same cell.
2455  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
2456 
2457  // If the fraction is zero or the hit does not belong to any calo
2458  // particle, set the caloparticleId for the hit to -1; this will
2459  // contribute to the number of noise hits.
2460 
2461  // MR Remove the case in which the fraction is 0, since this could be a
2462  // real hit that has been marked as halo.
2463  if (rhFraction == 0.) {
2464  hitsToCaloParticleId[hitId] = -2;
2465  numberOfHaloHitsInMCL++;
2466  }
2467  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
2468  hitsToCaloParticleId[hitId] -= 1;
2469  } else {
2470  auto maxCPEnergyInLC = 0.f;
2471  auto maxCPId = -1;
2472  for (auto& h : hit_find_in_CP->second) {
2473  auto shared_fraction = std::min(rhFraction, h.fraction);
2474  //We are in the case where there are calo particles with simhits connected via detid with the rechit under study
2475  //So, from all layer clusters, find the rechits that are connected with a calo particle and save/calculate the
2476  //energy of that calo particle as the sum over all rechits of the rechit energy weighted
2477  //by the caloparticle's fraction related to that rechit.
2478  CPEnergyInMCL[h.clusterId] += shared_fraction * hit->energy();
2479  //Same but for layer clusters for the cell association per layer
2480  CPEnergyInLC[h.clusterId] += shared_fraction * hit->energy();
2481  //Here the cPOnLayer[caloparticle][layer] object described above is filled.
2482  //For multiclusters with a matched rechit, the CP fraction times the hit energy is added and saved.
2483  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].first +=
2484  shared_fraction * hit->energy();
2485  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].second = FLT_MAX;
2486  //cpsInMultiCluster[multicluster][CPids]
2487  //Connects a multi cluster with all related caloparticles.
2488  cpsInMultiCluster[mclId].emplace_back(h.clusterId, FLT_MAX);
2489  //From all CaloParticles related to a layer cluster, save the id and energy of the CaloParticle
2490  //that, after simhit-rechit matching in the layer, has the maximum energy.
2491  if (shared_fraction > maxCPEnergyInLC) {
2492  //energy is used only here. cpid is saved for multiclusters
2493  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
2494  maxCPId = h.clusterId;
2495  }
2496  }
2497  //Keep in mind that maxCPId could be zero, so below we require strictly negative values (zero excluded) to count noise.
2498  hitsToCaloParticleId[hitId] = maxCPId;
2499  }
2500 
2501  } //end of loop through rechits of the layer cluster.
2502 
2503  //Loop through all rechits to count how many of them are noise and how many are matched.
2504  //For matched rechit-simhit pairs, count and save the number of rechits related to the maximum-energy CaloParticle.
2505  for (auto c : hitsToCaloParticleId) {
2506  if (c < 0) {
2507  numberOfNoiseHitsInMCL++;
2508  } else {
2509  occurrencesCPinMCL[c]++;
2510  }
2511  }
2512 
2513  //Below, from all maximum-energy CaloParticles, save the one with the largest number
2514  //of related rechits.
2515  for (auto& c : occurrencesCPinMCL) {
2516  if (c.second > maxCPNumberOfHitsInMCL) {
2517  maxCPId_byNumberOfHits = c.first;
2518  maxCPNumberOfHitsInMCL = c.second;
2519  }
2520  }
2521 
2522  //Find the CaloParticle that has the maximum energy shared with the multicluster under study.
2523  for (auto& c : CPEnergyInMCL) {
2524  if (c.second > maxEnergySharedMCLandCP) {
2525  maxCPId_byEnergy = c.first;
2526  maxEnergySharedMCLandCP = c.second;
2527  }
2528  }
2529  //The energy of the CaloParticle that was found to share the maximum energy with the multicluster under study.
2530  float totalCPEnergyFromLayerCP = 0.f;
2531  if (maxCPId_byEnergy >= 0) {
2532  //Loop through all layers
2533  for (unsigned int j = 0; j < layers * 2; ++j) {
2534  totalCPEnergyFromLayerCP = totalCPEnergyFromLayerCP + cPOnLayer[maxCPId_byEnergy][j].energy;
2535  }
2536  energyFractionOfCPinMCL = maxEnergySharedMCLandCP / totalCPEnergyFromLayerCP;
2537  if (multiClusters[mclId].energy() > 0.f) {
2538  energyFractionOfMCLinCP = maxEnergySharedMCLandCP / multiClusters[mclId].energy();
2539  }
2540  }
2541 
2542  LogDebug("HGCalValidator") << std::setw(12) << "multiCluster"
2543  << "\t" //LogDebug("HGCalValidator")
2544  << std::setw(10) << "mulcl energy"
2545  << "\t" << std::setw(5) << "nhits"
2546  << "\t" << std::setw(12) << "noise hits"
2547  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
2548  << "\t" << std::setw(8) << "nhitsCP"
2549  << "\t" << std::setw(16) << "maxCPId_byEnergy"
2550  << "\t" << std::setw(23) << "maxEnergySharedMCLandCP"
2551  << "\t" << std::setw(22) << "totalCPEnergyFromAllLayerCP"
2552  << "\t" << std::setw(22) << "energyFractionOfMCLinCP"
2553  << "\t" << std::setw(25) << "energyFractionOfCPinMCL"
2554  << "\t" << std::endl;
2555  LogDebug("HGCalValidator") << std::setw(12) << mclId << "\t" //LogDebug("HGCalValidator")
2556  << std::setw(10) << multiClusters[mclId].energy() << "\t" << std::setw(5)
2557  << numberOfHitsInMCL << "\t" << std::setw(12) << numberOfNoiseHitsInMCL << "\t"
2558  << std::setw(22) << maxCPId_byNumberOfHits << "\t" << std::setw(8)
2559  << maxCPNumberOfHitsInMCL << "\t" << std::setw(16) << maxCPId_byEnergy << "\t"
2560  << std::setw(23) << maxEnergySharedMCLandCP << "\t" << std::setw(22)
2561  << totalCPEnergyFromLayerCP << "\t" << std::setw(22) << energyFractionOfMCLinCP << "\t"
2562  << std::setw(25) << energyFractionOfCPinMCL << std::endl;
2563 
2564  } //end of loop through multi clusters
2565  }
2566  //Loop through multiclusters
2567  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2568  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
2569  if (!hits_and_fractions.empty()) {
2570  // Find the unique CaloParticle ids contributing to the multicluster.
2571  //cpsInMultiCluster[multicluster][CPids]
2572  std::sort(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
2573  auto last = std::unique(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
2574  cpsInMultiCluster[mclId].erase(last, cpsInMultiCluster[mclId].end());
2575 
2576  if (multiClusters[mclId].energy() == 0. && !cpsInMultiCluster[mclId].empty()) {
2577  //Loop through all CaloParticles contributing to multicluster mclId.
2578  for (auto& cpPair : cpsInMultiCluster[mclId]) {
2579  //In case of a multicluster with zero energy but related CaloParticles, the score is set to 1.
2580  cpPair.second = 1.;
2581  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first
2582  << "\t score \t" << cpPair.second << std::endl;
2583  histograms.h_score_multicl2caloparticle[count]->Fill(cpPair.second);
2584  }
2585  continue;
2586  }
2587 
2588  // Compute the correct normalization
2589  float invMultiClusterEnergyWeight = 0.f;
2590  for (auto const& haf : multiClusters[mclId].hitsAndFractions()) {
2591  invMultiClusterEnergyWeight +=
2592  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2593  }
2594  invMultiClusterEnergyWeight = 1.f / invMultiClusterEnergyWeight;
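  // Restating what the loop below computes (no new behaviour), for every
  // CaloParticle cp associated to this multicluster:
  //   score(mcl -> cp) = sum_h (f_mcl,h - f_cp,h)^2 * E_h^2 / sum_h (f_mcl,h * E_h)^2
  // where f_mcl,h and f_cp,h are the fractions of rechit h used by the multicluster
  // and by the CaloParticle, and E_h is the rechit energy. A perfect match gives 0,
  // while a CaloParticle sharing no hits with the multicluster gives exactly 1.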
2595 
2596  unsigned int numberOfHitsInLC = hits_and_fractions.size();
2597  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
2598  DetId rh_detid = hits_and_fractions[i].first;
2599  float rhFraction = hits_and_fractions[i].second;
2600  bool hitWithNoCP = false;
2601 
2602  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
2603  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
2604  hitWithNoCP = true;
2605  auto itcheck = hitMap.find(rh_detid);
2606  const HGCRecHit* hit = itcheck->second;
2607  float hitEnergyWeight = hit->energy() * hit->energy();
2608 
2609  for (auto& cpPair : cpsInMultiCluster[mclId]) {
2610  float cpFraction = 0.f;
2611  if (!hitWithNoCP) {
2612  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
2613  detIdToCaloParticleId_Map[rh_detid].end(),
2614  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
2615  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end()) {
2616  cpFraction = findHitIt->fraction;
2617  }
2618  }
2619  if (cpPair.second == FLT_MAX) {
2620  cpPair.second = 0.f;
2621  }
2622  cpPair.second +=
2623  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invMultiClusterEnergyWeight;
2624  }
2625  } //end of loop through rechits of layer cluster
2626 
2627  //In case of a multicluster with some energy but no related CaloParticles, print some info.
2628  if (cpsInMultiCluster[mclId].empty())
2629  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\tCP id:\t-1 "
2630  << "\t score \t-1"
2631  << "\n";
2632 
2633  auto score = std::min_element(std::begin(cpsInMultiCluster[mclId]),
2634  std::end(cpsInMultiCluster[mclId]),
2635  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2636  for (auto& cpPair : cpsInMultiCluster[mclId]) {
2637  // LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId
2638  // << "\t CP id: \t" << cpPair.first
2639  // << "\t score \t" << cpPair.second
2640  // << "\n";
2641  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first << "\t score \t"
2642  << cpPair.second << std::endl;
2643  if (cpPair.first == score->first) {
2644  histograms.h_score_multicl2caloparticle[count]->Fill(score->second);
2645  }
2646  float sharedeneCPallLayers = 0.;
2647  //Loop through all layers
2648  for (unsigned int j = 0; j < layers * 2; ++j) {
2649  auto const& cp_linked = cPOnLayer[cpPair.first][j].layerClusterIdToEnergyAndScore[mclId];
2650  sharedeneCPallLayers += cp_linked.first;
2651  } //end of loop through layers
2652  LogDebug("HGCalValidator") << "sharedeneCPallLayers " << sharedeneCPallLayers << std::endl;
2653  if (cpPair.first == score->first) {
2654  histograms.h_sharedenergy_multicl2caloparticle[count]->Fill(sharedeneCPallLayers /
2655  multiClusters[mclId].energy());
2656  histograms.h_energy_vs_score_multicl2caloparticle[count]->Fill(
2657  score->second, sharedeneCPallLayers / multiClusters[mclId].energy());
2658  }
2659  }
2660  auto assocFakeMerge = std::count_if(std::begin(cpsInMultiCluster[mclId]),
2661  std::end(cpsInMultiCluster[mclId]),
2662  [](const auto& obj) { return obj.second < ScoreCutMCLtoCPFakeMerge_; });
2663  tracksters_fakemerge[mclId] = assocFakeMerge;
2664  }
2665  } //end of loop through multiclusters
2666 
2667  std::unordered_map<int, std::vector<float>> score3d;
2668  std::unordered_map<int, std::vector<float>> mclsharedenergy;
2669  std::unordered_map<int, std::vector<float>> mclsharedenergyfrac;
2670 
2671  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2672  auto cpIndex = cPIndices[i];
2673  score3d[cpIndex].resize(nMultiClusters);
2674  mclsharedenergy[cpIndex].resize(nMultiClusters);
2675  mclsharedenergyfrac[cpIndex].resize(nMultiClusters);
2676  for (unsigned int j = 0; j < nMultiClusters; ++j) {
2677  score3d[cpIndex][j] = FLT_MAX;
2678  mclsharedenergy[cpIndex][j] = 0.f;
2679  mclsharedenergyfrac[cpIndex][j] = 0.f;
2680  }
2681  }
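  // Note on the FLT_MAX sentinel used throughout: a score initialised to FLT_MAX
  // means "no association found yet"; it is reset to 0 the first time a common hit
  // contributes, so unassociated pairs can be distinguished from perfectly matched
  // ones (whose score is exactly 0).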
2682 
2683  // Here we fill the plots used to compute the different metrics linked to
2684  // the gen level, namely efficiency and duplicate rate. In this loop we should
2685  // restrict ourselves to the selected CaloParticles only.
2686  for (const auto& cpId : cPSelectedIndices) {
2687  //We need to keep the multicluster ids that are related to the
2688  //CaloParticle under study for the final filling of the score.
2689  std::vector<unsigned int> cpId_mclId_related;
2690  cpId_mclId_related.clear();
2691 
2692  float CPenergy = 0.f;
2693  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
2694  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
2695  //Below gives the CP energy related to multicluster per layer.
2696  CPenergy += cPOnLayer[cpId][layerId].energy;
2697  if (CPNumberOfHits == 0)
2698  continue;
2699  int mclWithMaxEnergyInCP = -1;
2700  //This is the maximum energy related to multicluster per layer.
2701  float maxEnergyMCLperlayerinCP = 0.f;
2702  float CPEnergyFractionInMCLperlayer = 0.f;
2703  //Remember (do not be confused by the name): layerClusterIdToEnergyAndScore contains the multicluster id.
2704  for (const auto& mcl : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2705  if (mcl.second.first > maxEnergyMCLperlayerinCP) {
2706  maxEnergyMCLperlayerinCP = mcl.second.first;
2707  mclWithMaxEnergyInCP = mcl.first;
2708  }
2709  }
2710  if (CPenergy > 0.f)
2711  CPEnergyFractionInMCLperlayer = maxEnergyMCLperlayerinCP / CPenergy;
2712 
2713  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
2714  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
2715  << "CPNhitsOnLayer\t" << std::setw(18) << "mclWithMaxEnergyInCP\t" << std::setw(15)
2716  << "maxEnergyMCLinCP\t" << std::setw(20) << "CPEnergyFractionInMCL"
2717  << "\n";
2718  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
2719  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
2720  << CPNumberOfHits << "\t" << std::setw(18) << mclWithMaxEnergyInCP << "\t"
2721  << std::setw(15) << maxEnergyMCLperlayerinCP << "\t" << std::setw(20)
2722  << CPEnergyFractionInMCLperlayer << "\n";
2723 
2724  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
2725  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
2726  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
2727 
2728  bool hitWithNoMCL = false;
2729  if (cpFraction == 0.f)
2730  continue; //hopefully this should never happen
2731  auto hit_find_in_MCL = detIdToMultiClusterId_Map.find(cp_hitDetId);
2732  if (hit_find_in_MCL == detIdToMultiClusterId_Map.end())
2733  hitWithNoMCL = true;
2734  auto itcheck = hitMap.find(cp_hitDetId);
2735  const HGCRecHit* hit = itcheck->second;
2736  float hitEnergyWeight = hit->energy() * hit->energy();
2737  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2738  unsigned int multiClusterId = lcPair.first;
2739  if (std::find(std::begin(cpId_mclId_related), std::end(cpId_mclId_related), multiClusterId) ==
2740  std::end(cpId_mclId_related)) {
2741  cpId_mclId_related.push_back(multiClusterId);
2742  }
2743  float mclFraction = 0.f;
2744 
2745  if (!hitWithNoMCL) {
2746  auto findHitIt = std::find(detIdToMultiClusterId_Map[cp_hitDetId].begin(),
2747  detIdToMultiClusterId_Map[cp_hitDetId].end(),
2748  HGVHistoProducerAlgo::detIdInfoInMultiCluster{multiClusterId, 0, 0.f});
2749  if (findHitIt != detIdToMultiClusterId_Map[cp_hitDetId].end())
2750  mclFraction = findHitIt->fraction;
2751  }
2752  //Observe here that we do not divide, as before, by the layer cluster energy weight. We should first sum
2753  //over all layers and then divide by the total CP energy weight over all layers.
2754  if (lcPair.second.second == FLT_MAX) {
2755  lcPair.second.second = 0.f;
2756  }
2757  lcPair.second.second += (mclFraction - cpFraction) * (mclFraction - cpFraction) * hitEnergyWeight;
2758  LogDebug("HGCalValidator") << "multiClusterId:\t" << multiClusterId << "\t"
2759  << "mclfraction,cpfraction:\t" << mclFraction << ", " << cpFraction << "\t"
2760  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
2761  << "current score numerator:\t" << lcPair.second.second << "\n";
2762  }
2763  } //end of loop through sim hits of current calo particle
2764 
2765  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
2766  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id:\t-1 "
2767  << "\t layer \t " << layerId << " Sub score in \t -1"
2768  << "\n";
2769 
2770  for (const auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2771  //3d score here without the denominator at this point
2772  if (score3d[cpId][lcPair.first] == FLT_MAX) {
2773  score3d[cpId][lcPair.first] = 0.f;
2774  }
2775  score3d[cpId][lcPair.first] += lcPair.second.second;
2776  mclsharedenergy[cpId][lcPair.first] += lcPair.second.first;
2777  }
2778  } //end of loop through layers
2779 
2780  // Compute the correct normalization
2781  // We need to loop on the cPOnLayer data structure since this is the
2782  // only one that has the compressed information for multiple usage
2783  // of the same DetId by different SimClusters of a single CaloParticle.
2784  float invCPEnergyWeight = 0.f;
2785  for (const auto& layer : cPOnLayer[cpId]) {
2786  for (const auto& haf : layer.hits_and_fractions) {
2787  invCPEnergyWeight +=
2788  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2789  }
2790  }
2791  invCPEnergyWeight = 1.f / invCPEnergyWeight;
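  // Again just restating the code (nothing new): combined with the per-layer
  // accumulation above and the multiplication below, the final quantity is
  //   score3d(cp -> mcl) = sum_layers sum_h (f_mcl,h - f_cp,h)^2 * E_h^2
  //                        / sum_layers sum_h (f_cp,h * E_h)^2
  // i.e. the same quadratic mismatch as before, but normalised to the
  // CaloParticle's compressed hit-energy weight summed over all layers.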
2792 
2793  //Loop through related multiclusters here
2794  //Switch to a vector for access because it is faster.
2795  std::vector<int> cpId_mclId_related_vec(cpId_mclId_related.begin(), cpId_mclId_related.end());
2796  for (unsigned int i = 0; i < cpId_mclId_related_vec.size(); ++i) {
2797  auto mclId = cpId_mclId_related_vec[i];
2798  //Now time for the denominator
2799  score3d[cpId][mclId] = score3d[cpId][mclId] * invCPEnergyWeight;
2800  mclsharedenergyfrac[cpId][mclId] = (mclsharedenergy[cpId][mclId] / CPenergy);
2801 
2802  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id: \t" << mclId << "\t score \t" //
2803  << score3d[cpId][mclId] << "\t"
2804  << "invCPEnergyWeight \t" << invCPEnergyWeight << "\t"
2805  << "shared energy:\t" << mclsharedenergy[cpId][mclId] << "\t"
2806  << "shared energy fraction:\t" << mclsharedenergyfrac[cpId][mclId] << "\n";
2807 
2808  histograms.h_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId]);
2809 
2810  histograms.h_sharedenergy_caloparticle2multicl[count]->Fill(mclsharedenergyfrac[cpId][mclId]);
2811  histograms.h_energy_vs_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId],
2812  mclsharedenergyfrac[cpId][mclId]);
2813  } //end of loop through multiclusters
2814 
2815  auto is_assoc = [&](const auto& v) -> bool { return v < ScoreCutCPtoMCLEffDup_; };
2816 
2817  auto assocDup = std::count_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2818 
2819  if (assocDup > 0) {
2820  histograms.h_num_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2821  histograms.h_num_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2822  auto best = std::min_element(std::begin(score3d[cpId]), std::end(score3d[cpId]));
2823  auto bestmclId = std::distance(std::begin(score3d[cpId]), best);
2824 
2825  histograms.h_sharedenergy_caloparticle2multicl_vs_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta(),
2826  multiClusters[bestmclId].energy() / CPenergy);
2827  histograms.h_sharedenergy_caloparticle2multicl_vs_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi(),
2828  multiClusters[bestmclId].energy() / CPenergy);
2829  histograms.h_sharedenergy_caloparticle2multicl_assoc[count]->Fill(mclsharedenergyfrac[cpId][bestmclId]);
2830  }
2831  if (assocDup >= 2) {
2832  auto match = std::find_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2833  while (match != score3d[cpId].end()) {
2834  tracksters_duplicate[std::distance(std::begin(score3d[cpId]), match)] = 1;
2835  match = std::find_if(std::next(match), std::end(score3d[cpId]), is_assoc);
2836  }
2837  }
2838  histograms.h_denom_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2839  histograms.h_denom_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2840 
2841  } //end of loop through caloparticles
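  // A compact reading of the block above, with hypothetical numbers: if a selected
  // CaloParticle ends up with score3d = {0.05, 0.9, 0.12} against three
  // multiclusters and the cut is, say, 0.2, then assocDup = 2, the CaloParticle
  // enters the efficiency numerator histograms, and multiclusters 0 and 2 are
  // flagged in tracksters_duplicate.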
2842 
2843  // Here we fill the plots used to compute the different metrics linked to
2844  // the reco level, namely fake rate and merge rate. In this loop we should *not*
2845  // restrict ourselves to the selected CaloParticles.
2846  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2847  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
2848  if (!hits_and_fractions.empty()) {
2849  auto assocFakeMerge = tracksters_fakemerge[mclId];
2850  auto assocDuplicate = tracksters_duplicate[mclId];
2851  if (assocDuplicate) {
2852  histograms.h_numDup_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2853  histograms.h_numDup_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2854  }
2855  if (assocFakeMerge > 0) {
2856  histograms.h_num_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2857  histograms.h_num_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2858  auto best = std::min_element(std::begin(cpsInMultiCluster[mclId]),
2859  std::end(cpsInMultiCluster[mclId]),
2860  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2861 
2862  //This is the energy shared with the best CaloParticle, summed over all layers.
2863  float sharedeneCPallLayers = 0.;
2864  //Loop through all layers
2865  for (unsigned int j = 0; j < layers * 2; ++j) {
2866  auto const& best_cp_linked = cPOnLayer[best->first][j].layerClusterIdToEnergyAndScore[mclId];
2867  sharedeneCPallLayers += best_cp_linked.first;
2868  } //end of loop through layers
2869  histograms.h_sharedenergy_multicl2caloparticle_vs_eta[count]->Fill(
2870  multiClusters[mclId].eta(), sharedeneCPallLayers / multiClusters[mclId].energy());
2871  histograms.h_sharedenergy_multicl2caloparticle_vs_phi[count]->Fill(
2872  multiClusters[mclId].phi(), sharedeneCPallLayers / multiClusters[mclId].energy());
2873  }
2874  if (assocFakeMerge >= 2) {
2875  histograms.h_numMerge_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2876  histograms.h_numMerge_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2877  }
2878  histograms.h_denom_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2879  histograms.h_denom_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2880  }
2881  }
2882 }
2883 
2884 void HGVHistoProducerAlgo::fill_multi_cluster_histos(const Histograms& histograms,
2885  int count,
2886  const std::vector<reco::HGCalMultiCluster>& multiClusters,
2887  std::vector<CaloParticle> const& cP,
2888  std::vector<size_t> const& cPIndices,
2889  std::vector<size_t> const& cPSelectedIndices,
2890  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
2891  unsigned int layers) const {
2892  //Each event is to be treated as two events:
2893  //one event in the +z endcap plus another event in the -z endcap.
2894 
2895  //To keep track of total num of multiclusters
2896  int tnmclmz = 0; //-z
2897  int tnmclpz = 0; //+z
2898  //To count the number of multiclusters with 3 contiguous layers per event.
2899  int tncontmclpz = 0; //+z
2900  int tncontmclmz = 0; //-z
2901  //For the number of multiclusters without 3 contiguous layers per event.
2902  int tnnoncontmclpz = 0; //+z
2903  int tnnoncontmclmz = 0; //-z
2904  //We want to check below the score of cont and non cont multiclusters
2905  std::vector<bool> contmulti;
2906  contmulti.clear();
2907 
2908  //[mclId]-> vector of 2d layer clusters size
2909  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity;
2910  //[mclId]-> [layer][cluster size]
2911  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity_vs_layer;
2912  //We will need this for the scale text option
2913  // unsigned int totallcinmcls = 0;
2914  // for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2915  // totallcinmcls = totallcinmcls + multiClusters[mclId].clusters().size();
2916  // }
2917 
2918  auto nMultiClusters = multiClusters.size();
2919  //loop through multiclusters of the event
2920  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2921  const auto layerClusters = multiClusters[mclId].clusters();
2922  auto nLayerClusters = layerClusters.size();
2923 
2924  if (nLayerClusters == 0)
2925  continue;
2926 
2927  if (multiClusters[mclId].z() < 0.) {
2928  tnmclmz++;
2929  }
2930  if (multiClusters[mclId].z() > 0.) {
2931  tnmclpz++;
2932  }
2933 
2934  //Total number of layer clusters in multicluster
2935  int tnlcinmcl = 0;
2936 
2937  //To keep track of total num of layer clusters per multicluster
2938  //tnlcinmclperlaypz[layerid], tnlcinmclperlaymz[layerid]
2939  std::vector<int> tnlcinmclperlay(1000, 0); //+z
2940 
2941  //For the layers the multicluster extends over. Use a set because there would be many
2942  //duplicates, and then go back to a vector for random access, which is faster.
2943  std::set<int> multicluster_layers;
2944 
2945  bool multiclusterInZplus = false;
2946  bool multiclusterInZminus = false;
2947 
2948  //Loop through layer clusters
2949  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
2950  //Take the hits and their fractions of this specific layer cluster.
2951  const std::vector<std::pair<DetId, float>>& hits_and_fractions = layerClusters[lcId]->hitsAndFractions();
2952 
2953  //For the multiplicity of the 2d layer clusters in multiclusters
2954  multiplicity[mclId].emplace_back(hits_and_fractions.size());
2955 
2956  const auto firstHitDetId = hits_and_fractions[0].first;
2957  //The layer that the layer cluster belongs to
2958  int layerid = recHitTools_->getLayerWithOffset(firstHitDetId) +
2959  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2960  multicluster_layers.insert(layerid);
2961  multiplicity_vs_layer[mclId].emplace_back(layerid);
2962 
2963  tnlcinmclperlay[layerid]++;
2964  tnlcinmcl++;
2965 
2966  if (recHitTools_->zside(firstHitDetId) > 0.) {
2967  multiclusterInZplus = true;
2968  }
2969  if (recHitTools_->zside(firstHitDetId) < 0.) {
2970  multiclusterInZminus = true;
2971  }
2972 
2973  } //end of loop through layerclusters
2974 
2975  //Per layer : Loop 0->99
2976  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2977  if (histograms.h_clusternum_in_multicluster_perlayer[count].count(ilayer) && tnlcinmclperlay[ilayer] != 0) {
2978  histograms.h_clusternum_in_multicluster_perlayer[count].at(ilayer)->Fill((float)tnlcinmclperlay[ilayer]);
2979  }
2980  //For the profile now of 2d layer cluster in multiclusters vs layer number.
2981  if (tnlcinmclperlay[ilayer] != 0) {
2982  histograms.h_clusternum_in_multicluster_vs_layer[count]->Fill((float)ilayer, (float)tnlcinmclperlay[ilayer]);
2983  }
2984  } //end of loop over layers
2985 
2986  //Looking for multiclusters with 3 contiguous layers per event.
2987  std::vector<int> multicluster_layers_vec(multicluster_layers.begin(), multicluster_layers.end());
2988  //Since we want to also check for non contiguous multiclusters
2989  bool contimulti = false;
2990  //Observe that we start from the second element and stop one before the last, checking both neighbours of each element.
2991  if (multicluster_layers_vec.size() >= 3) {
2992  for (unsigned int i = 1; i < multicluster_layers_vec.size() - 1; ++i) {
2993  if ((multicluster_layers_vec[i - 1] + 1 == multicluster_layers_vec[i]) &&
2994  (multicluster_layers_vec[i + 1] - 1 == multicluster_layers_vec[i])) {
2995  //So, this is a multicluster with 3 contiguous layers per event
2996  if (multiclusterInZplus) {
2997  tncontmclpz++;
2998  }
2999  if (multiclusterInZminus) {
3000  tncontmclmz++;
3001  }
3002  contimulti = true;
3003  break;
3004  }
3005  }
3006  }
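  // Hypothetical example of the check above: the sorted layer set {10, 11, 12, 40}
  // contains the contiguous triplet 10-11-12 and counts as contiguous, while
  // {10, 12, 14} does not, since no layer there has both of its immediate
  // neighbours present.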
3007  //Count non contiguous multiclusters
3008  if (!contimulti) {
3009  if (multiclusterInZplus) {
3010  tnnoncontmclpz++;
3011  }
3012  if (multiclusterInZminus) {
3013  tnnoncontmclmz++;
3014  }
3015  }
3016 
3017  //Save for the score
3018  contmulti.push_back(contimulti);
3019 
3020  histograms.h_clusternum_in_multicluster[count]->Fill(tnlcinmcl);
3021 
3022  for (unsigned int lc = 0; lc < multiplicity[mclId].size(); ++lc) {
3023  //multiplicity of the current LC
3024  float mlp = std::count(std::begin(multiplicity[mclId]), std::end(multiplicity[mclId]), multiplicity[mclId][lc]);
3025  //LogDebug("HGCalValidator") << "mlp %" << (100. * mlp)/ ((float) nLayerClusters) << std::endl;
3026  // histograms.h_multiplicityOfLCinMCL[count]->Fill( mlp , multiplicity[mclId][lc] , 100. / (float) totallcinmcls );
3027  histograms.h_multiplicityOfLCinMCL[count]->Fill(mlp, multiplicity[mclId][lc]);
3028  //When we plot with the text option we want the entries to be the same
3029  //as the percentage of the current cell over the total number of clusters. For this we need an extra histo.
3030  histograms.h_multiplicity_numberOfEventsHistogram[count]->Fill(mlp);
3031  //For the cluster multiplicity vs layer
3032  //First with the -z endcap (V10:0->49)
3033  if (multiplicity_vs_layer[mclId][lc] < layers) {
3034  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus[count]->Fill(mlp, multiplicity_vs_layer[mclId][lc]);
3035  histograms.h_multiplicity_zminus_numberOfEventsHistogram[count]->Fill(mlp);
3036  } else { //Then for the +z (V10:50->99)
3037  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus[count]->Fill(
3038  mlp, multiplicity_vs_layer[mclId][lc] - layers);
3039  histograms.h_multiplicity_zplus_numberOfEventsHistogram[count]->Fill(mlp);
3040  }
3041  //For the cluster multiplicity vs cluster energy
3042  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy[count]->Fill(mlp, layerClusters[lc]->energy());
3043  }
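  // Worked example with made-up sizes: if the layer clusters of this multicluster
  // have sizes {10, 10, 7}, then mlp = 2 for each of the two 10-hit clusters and
  // mlp = 1 for the 7-hit one, so h_multiplicityOfLCinMCL is filled with the
  // points (2, 10), (2, 10) and (1, 7).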
3044 
3045  if (!multicluster_layers.empty()) {
3046  histograms.h_multicluster_x[count]->Fill(multiClusters[mclId].x());
3047  histograms.h_multicluster_y[count]->Fill(multiClusters[mclId].y());
3048  histograms.h_multicluster_z[count]->Fill(multiClusters[mclId].z());
3049  histograms.h_multicluster_eta[count]->Fill(multiClusters[mclId].eta());
3050  histograms.h_multicluster_phi[count]->Fill(multiClusters[mclId].phi());
3051 
3052  histograms.h_multicluster_firstlayer[count]->Fill((float)*multicluster_layers.begin());
3053  histograms.h_multicluster_lastlayer[count]->Fill((float)*multicluster_layers.rbegin());
3054  histograms.h_multicluster_layersnum[count]->Fill((float)multicluster_layers.size());
3055 
3056  histograms.h_multicluster_pt[count]->Fill(multiClusters[mclId].pt());
3057 
3058  histograms.h_multicluster_energy[count]->Fill(multiClusters[mclId].energy());
3059  }
3060 
3061  } //end of loop through multiclusters
3062 
3063  histograms.h_multiclusternum[count]->Fill(tnmclmz + tnmclpz);
3064  histograms.h_contmulticlusternum[count]->Fill(tncontmclpz + tncontmclmz);
3065  histograms.h_noncontmulticlusternum[count]->Fill(tnnoncontmclpz + tnnoncontmclmz);
3066 
3067  multiClusters_to_CaloParticles(histograms, count, multiClusters, cP, cPIndices, cPSelectedIndices, hitMap, layers);
3068 }
3069 
3070 double HGVHistoProducerAlgo::distance2(const double x1,
3071  const double y1,
3072  const double x2,
3073  const double y2) const { //distance squared
3074  const double dx = x1 - x2;
3075  const double dy = y1 - y2;
3076  return (dx * dx + dy * dy);
3077 } //distance squared
3078 double HGVHistoProducerAlgo::distance(const double x1,
3079  const double y1,
3080  const double x2,
3081  const double y2) const { //2-d distance on the layer (x-y)
3082  return std::sqrt(distance2(x1, y1, x2, y2));
3083 }
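// Minimal usage sketch for the two helpers above (values are made up):
//   distance2(1.0, 2.0, 4.0, 6.0) == 25.0   // 3^2 + 4^2
//   distance(1.0, 2.0, 4.0, 6.0)  == 5.0
// Keeping distance2 separate lets callers that only need to compare distances
// avoid the square root.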
3084 
3085 void HGVHistoProducerAlgo::setRecHitTools(std::shared_ptr<hgcal::RecHitTools> recHitTools) {
3086  recHitTools_ = recHitTools;
3087 }
3088 
3089 DetId HGVHistoProducerAlgo::findmaxhit(const reco::CaloCluster& cluster,
3090  std::unordered_map<DetId, const HGCRecHit*> const& hitMap) const {
3091  DetId themaxid;
3092  const std::vector<std::pair<DetId, float>>& hits_and_fractions = cluster.hitsAndFractions();
3093 
3094  double maxene = 0.;
3095  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
3096  it_haf != hits_and_fractions.end();
3097  ++it_haf) {
3098  DetId rh_detid = it_haf->first;
3099 
3100  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
3101  const HGCRecHit* hit = itcheck->second;
3102 
3103  if (maxene < hit->energy()) {
3104  maxene = hit->energy();
3105  themaxid = rh_detid;
3106  }
3107  }
3108 
3109  return themaxid;
3110 }
3111 
3112 double HGVHistoProducerAlgo::getEta(double eta) const {
3113  if (useFabsEta_)
3114  return fabs(eta);
3115  else
3116  return eta;
3117 }