HGVHistoProducerAlgo.cc
1 #include <numeric>
2 #include <iomanip>
3 
7 #include "TMath.h"
8 #include "TLatex.h"
9 #include "TF1.h"
10 
11 using namespace std;
12 
13 //Parameters for the score cuts. Later, these will become part of the
14 //configuration parameters for the HGCAL associator.
15 const double ScoreCutLCtoCP_ = 0.1;
16 const double ScoreCutCPtoLC_ = 0.1;
17 const double ScoreCutLCtoSC_ = 0.1;
18 const double ScoreCutSCtoLC_ = 0.1;
19 const double ScoreCutTSTtoCPFakeMerge_ = 0.6;
20 const double ScoreCutCPtoTSTEffDup_ = 0.2;
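//These thresholds are used when the association scores are filled below:
//roughly, a pair whose score is below the corresponding cut is treated as
//associated. The suffixes indicate where each cut applies (layer cluster vs
//CaloParticle, layer cluster vs SimCluster, Trackster-to-CaloParticle
//fake/merge, CaloParticle-to-Trackster efficiency/duplicate).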
21 
22 HGVHistoProducerAlgo::HGVHistoProducerAlgo(const edm::ParameterSet& pset)
23  : //parameters for eta
24  minEta_(pset.getParameter<double>("minEta")),
25  maxEta_(pset.getParameter<double>("maxEta")),
26  nintEta_(pset.getParameter<int>("nintEta")),
27  useFabsEta_(pset.getParameter<bool>("useFabsEta")),
28 
29  //parameters for energy
30  minEne_(pset.getParameter<double>("minEne")),
31  maxEne_(pset.getParameter<double>("maxEne")),
32  nintEne_(pset.getParameter<int>("nintEne")),
33 
34  //parameters for pt
35  minPt_(pset.getParameter<double>("minPt")),
36  maxPt_(pset.getParameter<double>("maxPt")),
37  nintPt_(pset.getParameter<int>("nintPt")),
38 
39  //parameters for phi
40  minPhi_(pset.getParameter<double>("minPhi")),
41  maxPhi_(pset.getParameter<double>("maxPhi")),
42  nintPhi_(pset.getParameter<int>("nintPhi")),
43 
44  //parameters for counting mixed hits SimClusters
45  minMixedHitsSimCluster_(pset.getParameter<double>("minMixedHitsSimCluster")),
46  maxMixedHitsSimCluster_(pset.getParameter<double>("maxMixedHitsSimCluster")),
47  nintMixedHitsSimCluster_(pset.getParameter<int>("nintMixedHitsSimCluster")),
48 
49  //parameters for counting mixed hits clusters
50  minMixedHitsCluster_(pset.getParameter<double>("minMixedHitsCluster")),
51  maxMixedHitsCluster_(pset.getParameter<double>("maxMixedHitsCluster")),
52  nintMixedHitsCluster_(pset.getParameter<int>("nintMixedHitsCluster")),
53 
54  //parameters for the total amount of energy clustered by all layer clusters (fraction over CaloParticles)
55  minEneCl_(pset.getParameter<double>("minEneCl")),
56  maxEneCl_(pset.getParameter<double>("maxEneCl")),
57  nintEneCl_(pset.getParameter<int>("nintEneCl")),
58 
59  //parameters for the longitudinal depth barycenter.
60  minLongDepBary_(pset.getParameter<double>("minLongDepBary")),
61  maxLongDepBary_(pset.getParameter<double>("maxLongDepBary")),
62  nintLongDepBary_(pset.getParameter<int>("nintLongDepBary")),
63 
64  //parameters for the z position of vertex plots
65  minZpos_(pset.getParameter<double>("minZpos")),
66  maxZpos_(pset.getParameter<double>("maxZpos")),
67  nintZpos_(pset.getParameter<int>("nintZpos")),
68 
69  //Parameters for the total number of SimClusters per layer
70  minTotNsimClsperlay_(pset.getParameter<double>("minTotNsimClsperlay")),
71  maxTotNsimClsperlay_(pset.getParameter<double>("maxTotNsimClsperlay")),
72  nintTotNsimClsperlay_(pset.getParameter<int>("nintTotNsimClsperlay")),
73 
74  //Parameters for the total number of layer clusters per layer
75  minTotNClsperlay_(pset.getParameter<double>("minTotNClsperlay")),
76  maxTotNClsperlay_(pset.getParameter<double>("maxTotNClsperlay")),
77  nintTotNClsperlay_(pset.getParameter<int>("nintTotNClsperlay")),
78 
79  //Parameters for the energy clustered by layer clusters per layer (fraction over CaloParticles)
80  minEneClperlay_(pset.getParameter<double>("minEneClperlay")),
81  maxEneClperlay_(pset.getParameter<double>("maxEneClperlay")),
82  nintEneClperlay_(pset.getParameter<int>("nintEneClperlay")),
83 
84  //Parameters for the score both for:
85  //1. calo particle to layer clusters association per layer
86  //2. layer cluster to calo particles association per layer
87  minScore_(pset.getParameter<double>("minScore")),
88  maxScore_(pset.getParameter<double>("maxScore")),
89  nintScore_(pset.getParameter<int>("nintScore")),
90 
91  //Parameters for shared energy fraction. That is:
92  //1. Fraction of each of the layer clusters energy related to a
93  //calo particle over that calo particle's energy.
94  //2. Fraction of each of the calo particles energy
95  //related to a layer cluster over that layer cluster's energy.
96  minSharedEneFrac_(pset.getParameter<double>("minSharedEneFrac")),
97  maxSharedEneFrac_(pset.getParameter<double>("maxSharedEneFrac")),
98  nintSharedEneFrac_(pset.getParameter<int>("nintSharedEneFrac")),
99  minTSTSharedEneFracEfficiency_(pset.getParameter<double>("minTSTSharedEneFracEfficiency")),
100 
101  //Same as above for Tracksters
102  minTSTSharedEneFrac_(pset.getParameter<double>("minTSTSharedEneFrac")),
103  maxTSTSharedEneFrac_(pset.getParameter<double>("maxTSTSharedEneFrac")),
104  nintTSTSharedEneFrac_(pset.getParameter<int>("nintTSTSharedEneFrac")),
105 
106  //Parameters for the total number of SimClusters per thickness
107  minTotNsimClsperthick_(pset.getParameter<double>("minTotNsimClsperthick")),
108  maxTotNsimClsperthick_(pset.getParameter<double>("maxTotNsimClsperthick")),
109  nintTotNsimClsperthick_(pset.getParameter<int>("nintTotNsimClsperthick")),
110 
111  //Parameters for the total number of layer clusters per thickness
112  minTotNClsperthick_(pset.getParameter<double>("minTotNClsperthick")),
113  maxTotNClsperthick_(pset.getParameter<double>("maxTotNClsperthick")),
114  nintTotNClsperthick_(pset.getParameter<int>("nintTotNClsperthick")),
115 
116  //Parameters for the total number of cells per thickness per layer
117  minTotNcellsperthickperlayer_(pset.getParameter<double>("minTotNcellsperthickperlayer")),
118  maxTotNcellsperthickperlayer_(pset.getParameter<double>("maxTotNcellsperthickperlayer")),
119  nintTotNcellsperthickperlayer_(pset.getParameter<int>("nintTotNcellsperthickperlayer")),
120 
121  //Parameters for the distance of cluster cells to seed cell per thickness per layer
122  minDisToSeedperthickperlayer_(pset.getParameter<double>("minDisToSeedperthickperlayer")),
123  maxDisToSeedperthickperlayer_(pset.getParameter<double>("maxDisToSeedperthickperlayer")),
124  nintDisToSeedperthickperlayer_(pset.getParameter<int>("nintDisToSeedperthickperlayer")),
125 
126  //Parameters for the energy weighted distance of cluster cells to seed cell per thickness per layer
127  minDisToSeedperthickperlayerenewei_(pset.getParameter<double>("minDisToSeedperthickperlayerenewei")),
128  maxDisToSeedperthickperlayerenewei_(pset.getParameter<double>("maxDisToSeedperthickperlayerenewei")),
129  nintDisToSeedperthickperlayerenewei_(pset.getParameter<int>("nintDisToSeedperthickperlayerenewei")),
130 
131  //Parameters for the distance of cluster cells to max cell per thickness per layer
132  minDisToMaxperthickperlayer_(pset.getParameter<double>("minDisToMaxperthickperlayer")),
133  maxDisToMaxperthickperlayer_(pset.getParameter<double>("maxDisToMaxperthickperlayer")),
134  nintDisToMaxperthickperlayer_(pset.getParameter<int>("nintDisToMaxperthickperlayer")),
135 
136  //Parameters for the energy weighted distance of cluster cells to max cell per thickness per layer
137  minDisToMaxperthickperlayerenewei_(pset.getParameter<double>("minDisToMaxperthickperlayerenewei")),
138  maxDisToMaxperthickperlayerenewei_(pset.getParameter<double>("maxDisToMaxperthickperlayerenewei")),
139  nintDisToMaxperthickperlayerenewei_(pset.getParameter<int>("nintDisToMaxperthickperlayerenewei")),
140 
141  //Parameters for the distance of seed cell to max cell per thickness per layer
142  minDisSeedToMaxperthickperlayer_(pset.getParameter<double>("minDisSeedToMaxperthickperlayer")),
143  maxDisSeedToMaxperthickperlayer_(pset.getParameter<double>("maxDisSeedToMaxperthickperlayer")),
144  nintDisSeedToMaxperthickperlayer_(pset.getParameter<int>("nintDisSeedToMaxperthickperlayer")),
145 
146  //Parameters for the energy of a cluster per thickness per layer
147  minClEneperthickperlayer_(pset.getParameter<double>("minClEneperthickperlayer")),
148  maxClEneperthickperlayer_(pset.getParameter<double>("maxClEneperthickperlayer")),
149  nintClEneperthickperlayer_(pset.getParameter<int>("nintClEneperthickperlayer")),
150 
151  //Parameters for the energy density of cluster cells per thickness
152  minCellsEneDensperthick_(pset.getParameter<double>("minCellsEneDensperthick")),
153  maxCellsEneDensperthick_(pset.getParameter<double>("maxCellsEneDensperthick")),
154  nintCellsEneDensperthick_(pset.getParameter<int>("nintCellsEneDensperthick")),
155 
156  //Parameters for the total number of Tracksters per event
157  //We always treat one event as two events, one in +z and one in -z
158  minTotNTSTs_(pset.getParameter<double>("minTotNTSTs")),
159  maxTotNTSTs_(pset.getParameter<double>("maxTotNTSTs")),
160  nintTotNTSTs_(pset.getParameter<int>("nintTotNTSTs")),
161 
162  //Parameters for the total number of layer clusters in Trackster
163  minTotNClsinTSTs_(pset.getParameter<double>("minTotNClsinTSTs")),
164  maxTotNClsinTSTs_(pset.getParameter<double>("maxTotNClsinTSTs")),
165  nintTotNClsinTSTs_(pset.getParameter<int>("nintTotNClsinTSTs")),
166 
167  //Parameters for the total number of layer clusters in Trackster per layer
168  minTotNClsinTSTsperlayer_(pset.getParameter<double>("minTotNClsinTSTsperlayer")),
169  maxTotNClsinTSTsperlayer_(pset.getParameter<double>("maxTotNClsinTSTsperlayer")),
170  nintTotNClsinTSTsperlayer_(pset.getParameter<int>("nintTotNClsinTSTsperlayer")),
171 
172  //Parameters for the multiplicity of layer clusters in Trackster
173  minMplofLCs_(pset.getParameter<double>("minMplofLCs")),
174  maxMplofLCs_(pset.getParameter<double>("maxMplofLCs")),
175  nintMplofLCs_(pset.getParameter<int>("nintMplofLCs")),
176 
177  //Parameters for cluster size
178  minSizeCLsinTSTs_(pset.getParameter<double>("minSizeCLsinTSTs")),
179  maxSizeCLsinTSTs_(pset.getParameter<double>("maxSizeCLsinTSTs")),
180  nintSizeCLsinTSTs_(pset.getParameter<int>("nintSizeCLsinTSTs")),
181 
182  //Parameters for the energy of a cluster per multiplicity
183  minClEnepermultiplicity_(pset.getParameter<double>("minClEnepermultiplicity")),
184  maxClEnepermultiplicity_(pset.getParameter<double>("maxClEnepermultiplicity")),
185  nintClEnepermultiplicity_(pset.getParameter<int>("nintClEnepermultiplicity")),
186 
187  //parameters for x
188  minX_(pset.getParameter<double>("minX")),
189  maxX_(pset.getParameter<double>("maxX")),
190  nintX_(pset.getParameter<int>("nintX")),
191 
192  //parameters for y
193  minY_(pset.getParameter<double>("minY")),
194  maxY_(pset.getParameter<double>("maxY")),
195  nintY_(pset.getParameter<int>("nintY")),
196 
197  //parameters for z
198  minZ_(pset.getParameter<double>("minZ")),
199  maxZ_(pset.getParameter<double>("maxZ")),
200  nintZ_(pset.getParameter<int>("nintZ")) {}
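//Each observable above is described by a (min, max, nint) triad read from the
//ParameterSet; in the booking methods below these triads are passed to
//book1D/book2D/bookProfile as the number of bins and the axis range.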
201 
203 
205  histograms.lastLayerEEzm = ibook.bookInt("lastLayerEEzm");
206  histograms.lastLayerFHzm = ibook.bookInt("lastLayerFHzm");
207  histograms.maxlayerzm = ibook.bookInt("maxlayerzm");
208  histograms.lastLayerEEzp = ibook.bookInt("lastLayerEEzp");
209  histograms.lastLayerFHzp = ibook.bookInt("lastLayerFHzp");
210  histograms.maxlayerzp = ibook.bookInt("maxlayerzp");
211 }
212 
215  int pdgid,
216  unsigned int layers) {
217  histograms.h_caloparticle_eta[pdgid] =
218  ibook.book1D("N of caloparticle vs eta", "N of caloParticles vs eta", nintEta_, minEta_, maxEta_);
219  histograms.h_caloparticle_eta_Zorigin[pdgid] =
220  ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_);
221 
222  histograms.h_caloparticle_energy[pdgid] =
223  ibook.book1D("Energy", "Energy of CaloParticles", nintEne_, minEne_, maxEne_);
224  histograms.h_caloparticle_pt[pdgid] = ibook.book1D("Pt", "Pt of CaloParticles", nintPt_, minPt_, maxPt_);
225  histograms.h_caloparticle_phi[pdgid] = ibook.book1D("Phi", "Phi of CaloParticles", nintPhi_, minPhi_, maxPhi_);
226  histograms.h_caloparticle_selfenergy[pdgid] =
227  ibook.book1D("SelfEnergy", "Total Energy of Hits in Sim Clusters (matched)", nintEne_, minEne_, maxEne_);
228  histograms.h_caloparticle_energyDifference[pdgid] =
229  ibook.book1D("EnergyDifference", "(Energy-SelfEnergy)/Energy", 300., -5., 1.);
230 
231  histograms.h_caloparticle_nSimClusters[pdgid] =
232  ibook.book1D("Num Sim Clusters", "Num Sim Clusters in CaloParticles", 100, 0., 100.);
233  histograms.h_caloparticle_nHitsInSimClusters[pdgid] =
234  ibook.book1D("Num Hits in Sim Clusters", "Num Hits in Sim Clusters in CaloParticles", 1000, 0., 1000.);
235  histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = ibook.book1D(
236  "Num Rec-matched Hits in Sim Clusters", "Num Hits in Sim Clusters (matched) in CaloParticles", 1000, 0., 1000.);
237 
238  histograms.h_caloparticle_nHits_matched_energy[pdgid] =
239  ibook.book1D("Energy of Rec-matched Hits", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.);
240  histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] =
241  ibook.book2D("Energy of Rec-matched Hits vs layer",
242  "Energy of Hits in Sim Clusters (matched) vs layer",
243  2 * layers,
244  0.,
245  (float)2 * layers,
246  100,
247  0.,
248  10.);
249  histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] =
250  ibook.book2D("Energy of Rec-matched Hits vs layer (1SC)",
251  "Energy of Hits only 1 Sim Clusters (matched) vs layer",
252  2 * layers,
253  0.,
254  (float)2 * layers,
255  100,
256  0.,
257  10.);
258  histograms.h_caloparticle_sum_energy_layer[pdgid] =
259  ibook.book2D("Rec-matched Hits Sum Energy vs layer",
260  "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer",
261  2 * layers,
262  0.,
263  (float)2 * layers,
264  110,
265  0.,
266  110.);
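//The 110 bins spanning 0-110 on the y-axis suggest the rescaled hit-energy sum
//is filled as a percentage of the CaloParticle energy, with some headroom
//above 100%.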
267 
268  histograms.h_caloparticle_firstlayer[pdgid] =
269  ibook.book1D("First Layer", "First layer of the CaloParticles", 2 * layers, 0., (float)2 * layers);
270  histograms.h_caloparticle_lastlayer[pdgid] =
271  ibook.book1D("Last Layer", "Last layer of the CaloParticles", 2 * layers, 0., (float)2 * layers);
272  histograms.h_caloparticle_layersnum[pdgid] =
273  ibook.book1D("Number of Layers", "Number of layers of the CaloParticles", 2 * layers, 0., (float)2 * layers);
274  histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = ibook.book1D(
275  "First Layer (rec-matched hit)", "First layer of the CaloParticles (matched)", 2 * layers, 0., (float)2 * layers);
276  histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = ibook.book1D(
277  "Last Layer (rec-matched hit)", "Last layer of the CaloParticles (matched)", 2 * layers, 0., (float)2 * layers);
278  histograms.h_caloparticle_layersnum_matchedtoRecHit[pdgid] =
279  ibook.book1D("Number of Layers (rec-matched hit)",
280  "Number of layers of the CaloParticles (matched)",
281  2 * layers,
282  0.,
283  (float)2 * layers);
284 }
285 
288  unsigned int layers,
289  std::vector<int> thicknesses) {
290  //---------------------------------------------------------------------------------------------------------------------------
291  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
292  auto istr1 = std::to_string(ilayer);
293  while (istr1.size() < 2) {
294  istr1.insert(0, "0");
295  }
296  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
297  std::string istr2 = "";
298  //First with the -z endcap
299  if (ilayer < layers) {
300  istr2 = std::to_string(ilayer + 1) + " in z-";
301  } else { //Then for the +z
302  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
303  }
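//Mapping example: istr1 is the zero-padded global layer index ("7" -> "07"),
//while istr2 is the physical label; e.g. with layers = 52, ilayer 0-51 maps to
//"1 in z-" ... "52 in z-" and ilayer 52-103 maps to "1 in z+" ... "52 in z+".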
304  histograms.h_simclusternum_perlayer[ilayer] = ibook.book1D("totsimclusternum_layer_" + istr1,
305  "total number of SimClusters for layer " + istr2,
306  nintTotNsimClsperlay_,
307  minTotNsimClsperlay_,
308  maxTotNsimClsperlay_);
309 
310  } //end of loop over layers
311  //---------------------------------------------------------------------------------------------------------------------------
312  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
313  auto istr = std::to_string(*it);
314  histograms.h_simclusternum_perthick[(*it)] = ibook.book1D("totsimclusternum_thick_" + istr,
315  "total number of simclusters for thickness " + istr,
316  nintTotNsimClsperthick_,
317  minTotNsimClsperthick_,
318  maxTotNsimClsperthick_);
319  } //end of loop over thicknesses
320 
321  //---------------------------------------------------------------------------------------------------------------------------
322  //z-
323  histograms.h_mixedhitssimcluster_zminus =
324  ibook.book1D("mixedhitssimcluster_zminus",
325  "N of simclusters that contain hits of more than one kind in z-",
326  nintMixedHitsSimCluster_,
327  minMixedHitsSimCluster_,
328  maxMixedHitsSimCluster_);
329  //z+
330  histograms.h_mixedhitssimcluster_zplus =
331  ibook.book1D("mixedhitssimcluster_zplus",
332  "N of simclusters that contain hits of more than one kind in z+",
333  nintMixedHitsSimCluster_,
334  minMixedHitsSimCluster_,
335  maxMixedHitsSimCluster_);
336 }
337 
340  unsigned int layers,
341  std::vector<int> thicknesses) {
342  std::unordered_map<int, dqm::reco::MonitorElement*> denom_layercl_in_simcl_eta_perlayer;
343  denom_layercl_in_simcl_eta_perlayer.clear();
344  std::unordered_map<int, dqm::reco::MonitorElement*> denom_layercl_in_simcl_phi_perlayer;
345  denom_layercl_in_simcl_phi_perlayer.clear();
346  std::unordered_map<int, dqm::reco::MonitorElement*> score_layercl2simcluster_perlayer;
347  score_layercl2simcluster_perlayer.clear();
348  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_perlayer;
349  sharedenergy_layercl2simcluster_perlayer.clear();
350  std::unordered_map<int, dqm::reco::MonitorElement*> energy_vs_score_layercl2simcluster_perlayer;
351  energy_vs_score_layercl2simcluster_perlayer.clear();
352  std::unordered_map<int, dqm::reco::MonitorElement*> num_layercl_in_simcl_eta_perlayer;
353  num_layercl_in_simcl_eta_perlayer.clear();
354  std::unordered_map<int, dqm::reco::MonitorElement*> num_layercl_in_simcl_phi_perlayer;
355  num_layercl_in_simcl_phi_perlayer.clear();
356  std::unordered_map<int, dqm::reco::MonitorElement*> numMerge_layercl_in_simcl_eta_perlayer;
357  numMerge_layercl_in_simcl_eta_perlayer.clear();
358  std::unordered_map<int, dqm::reco::MonitorElement*> numMerge_layercl_in_simcl_phi_perlayer;
359  numMerge_layercl_in_simcl_phi_perlayer.clear();
360  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_vs_eta_perlayer;
361  sharedenergy_layercl2simcluster_vs_eta_perlayer.clear();
362  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_layercl2simcluster_vs_phi_perlayer;
363  sharedenergy_layercl2simcluster_vs_phi_perlayer.clear();
364  std::unordered_map<int, dqm::reco::MonitorElement*> denom_simcluster_eta_perlayer;
365  denom_simcluster_eta_perlayer.clear();
366  std::unordered_map<int, dqm::reco::MonitorElement*> denom_simcluster_phi_perlayer;
367  denom_simcluster_phi_perlayer.clear();
368  std::unordered_map<int, dqm::reco::MonitorElement*> score_simcluster2layercl_perlayer;
369  score_simcluster2layercl_perlayer.clear();
370  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_perlayer;
371  sharedenergy_simcluster2layercl_perlayer.clear();
372  std::unordered_map<int, dqm::reco::MonitorElement*> energy_vs_score_simcluster2layercl_perlayer;
373  energy_vs_score_simcluster2layercl_perlayer.clear();
374  std::unordered_map<int, dqm::reco::MonitorElement*> num_simcluster_eta_perlayer;
375  num_simcluster_eta_perlayer.clear();
376  std::unordered_map<int, dqm::reco::MonitorElement*> num_simcluster_phi_perlayer;
377  num_simcluster_phi_perlayer.clear();
378  std::unordered_map<int, dqm::reco::MonitorElement*> numDup_simcluster_eta_perlayer;
379  numDup_simcluster_eta_perlayer.clear();
380  std::unordered_map<int, dqm::reco::MonitorElement*> numDup_simcluster_phi_perlayer;
381  numDup_simcluster_phi_perlayer.clear();
382  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_vs_eta_perlayer;
383  sharedenergy_simcluster2layercl_vs_eta_perlayer.clear();
384  std::unordered_map<int, dqm::reco::MonitorElement*> sharedenergy_simcluster2layercl_vs_phi_perlayer;
385  sharedenergy_simcluster2layercl_vs_phi_perlayer.clear();
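//These temporary per-layer maps are filled in the loop below and then moved
//into the corresponding histograms.h_* vectors at the end of this method, so
//each call to this booking function appends one complete set of per-layer
//plots.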
386 
387  //---------------------------------------------------------------------------------------------------------------------------
388  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
389  auto istr1 = std::to_string(ilayer);
390  while (istr1.size() < 2) {
391  istr1.insert(0, "0");
392  }
393  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
394  std::string istr2 = "";
395  //First with the -z endcap
396  if (ilayer < layers) {
397  istr2 = std::to_string(ilayer + 1) + " in z-";
398  } else { //Then for the +z
399  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
400  }
401  //-------------------------------------------------------------------------------------------------------------------------
402  denom_layercl_in_simcl_eta_perlayer[ilayer] =
403  ibook.book1D("Denom_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
404  "Denom LayerCluster in SimCluster Eta per Layer Cluster for layer " + istr2,
405  nintEta_,
406  minEta_,
407  maxEta_);
408  //-------------------------------------------------------------------------------------------------------------------------
409  denom_layercl_in_simcl_phi_perlayer[ilayer] =
410  ibook.book1D("Denom_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
411  "Denom LayerCluster in SimCluster Phi per Layer Cluster for layer " + istr2,
412  nintPhi_,
413  minPhi_,
414  maxPhi_);
415  //-------------------------------------------------------------------------------------------------------------------------
416  score_layercl2simcluster_perlayer[ilayer] = ibook.book1D("Score_layercl2simcluster_perlayer" + istr1,
417  "Score of Layer Cluster per SimCluster for layer " + istr2,
418  nintScore_,
419  minScore_,
420  maxScore_);
421  //-------------------------------------------------------------------------------------------------------------------------
422  score_simcluster2layercl_perlayer[ilayer] = ibook.book1D("Score_simcluster2layercl_perlayer" + istr1,
423  "Score of SimCluster per Layer Cluster for layer " + istr2,
424  nintScore_,
425  minScore_,
426  maxScore_);
427  //-------------------------------------------------------------------------------------------------------------------------
428  energy_vs_score_simcluster2layercl_perlayer[ilayer] =
429  ibook.book2D("Energy_vs_Score_simcluster2layer_perlayer" + istr1,
430  "Energy vs Score of SimCluster per Layer Cluster for layer " + istr2,
431  nintScore_,
432  minScore_,
433  maxScore_,
434  nintSharedEneFrac_,
435  minSharedEneFrac_,
436  maxSharedEneFrac_);
437  //-------------------------------------------------------------------------------------------------------------------------
438  energy_vs_score_layercl2simcluster_perlayer[ilayer] =
439  ibook.book2D("Energy_vs_Score_layer2simcluster_perlayer" + istr1,
440  "Energy vs Score of Layer Cluster per SimCluster for layer " + istr2,
441  nintScore_,
442  minScore_,
443  maxScore_,
444  nintSharedEneFrac_,
445  minSharedEneFrac_,
446  maxSharedEneFrac_);
447  //-------------------------------------------------------------------------------------------------------------------------
448  sharedenergy_simcluster2layercl_perlayer[ilayer] =
449  ibook.book1D("SharedEnergy_simcluster2layercl_perlayer" + istr1,
450  "Shared Energy of SimCluster per Layer Cluster for layer " + istr2,
451  nintSharedEneFrac_,
452  minSharedEneFrac_,
453  maxSharedEneFrac_);
454  //-------------------------------------------------------------------------------------------------------------------------
455  sharedenergy_simcluster2layercl_vs_eta_perlayer[ilayer] =
456  ibook.bookProfile("SharedEnergy_simcluster2layercl_vs_eta_perlayer" + istr1,
457  "Shared Energy of SimCluster vs #eta per best Layer Cluster for layer " + istr2,
458  nintEta_,
459  minEta_,
460  maxEta_,
461  minSharedEneFrac_,
462  maxSharedEneFrac_);
463  //-------------------------------------------------------------------------------------------------------------------------
464  sharedenergy_simcluster2layercl_vs_phi_perlayer[ilayer] =
465  ibook.bookProfile("SharedEnergy_simcluster2layercl_vs_phi_perlayer" + istr1,
466  "Shared Energy of SimCluster vs #phi per best Layer Cluster for layer " + istr2,
467  nintPhi_,
468  minPhi_,
469  maxPhi_,
470  minSharedEneFrac_,
471  maxSharedEneFrac_);
472  //-------------------------------------------------------------------------------------------------------------------------
473  sharedenergy_layercl2simcluster_perlayer[ilayer] =
474  ibook.book1D("SharedEnergy_layercluster2simcluster_perlayer" + istr1,
475  "Shared Energy of Layer Cluster per SimCluster for layer " + istr2,
476  nintSharedEneFrac_,
477  minSharedEneFrac_,
478  maxSharedEneFrac_);
479  //-------------------------------------------------------------------------------------------------------------------------
480  sharedenergy_layercl2simcluster_vs_eta_perlayer[ilayer] =
481  ibook.bookProfile("SharedEnergy_layercl2simcluster_vs_eta_perlayer" + istr1,
482  "Shared Energy of LayerCluster vs #eta per best SimCluster for layer " + istr2,
483  nintEta_,
484  minEta_,
485  maxEta_,
486  minSharedEneFrac_,
487  maxSharedEneFrac_);
488  //-------------------------------------------------------------------------------------------------------------------------
489  sharedenergy_layercl2simcluster_vs_phi_perlayer[ilayer] =
490  ibook.bookProfile("SharedEnergy_layercl2simcluster_vs_phi_perlayer" + istr1,
491  "Shared Energy of LayerCluster vs #phi per best SimCluster for layer " + istr2,
492  nintPhi_,
493  minPhi_,
494  maxPhi_,
495  minSharedEneFrac_,
496  maxSharedEneFrac_);
497  //-------------------------------------------------------------------------------------------------------------------------
498  num_simcluster_eta_perlayer[ilayer] = ibook.book1D("Num_SimCluster_Eta_perlayer" + istr1,
499  "Num SimCluster Eta per Layer Cluster for layer " + istr2,
500  nintEta_,
501  minEta_,
502  maxEta_);
503  //-------------------------------------------------------------------------------------------------------------------------
504  numDup_simcluster_eta_perlayer[ilayer] =
505  ibook.book1D("NumDup_SimCluster_Eta_perlayer" + istr1,
506  "Num Duplicate SimCluster Eta per Layer Cluster for layer " + istr2,
507  nintEta_,
508  minEta_,
509  maxEta_);
510  //-------------------------------------------------------------------------------------------------------------------------
511  denom_simcluster_eta_perlayer[ilayer] = ibook.book1D("Denom_SimCluster_Eta_perlayer" + istr1,
512  "Denom SimCluster Eta per Layer Cluster for layer " + istr2,
513  nintEta_,
514  minEta_,
515  maxEta_);
516  //-------------------------------------------------------------------------------------------------------------------------
517  num_simcluster_phi_perlayer[ilayer] = ibook.book1D("Num_SimCluster_Phi_perlayer" + istr1,
518  "Num SimCluster Phi per Layer Cluster for layer " + istr2,
519  nintPhi_,
520  minPhi_,
521  maxPhi_);
522  //-------------------------------------------------------------------------------------------------------------------------
523  numDup_simcluster_phi_perlayer[ilayer] =
524  ibook.book1D("NumDup_SimCluster_Phi_perlayer" + istr1,
525  "Num Duplicate SimCluster Phi per Layer Cluster for layer " + istr2,
526  nintPhi_,
527  minPhi_,
528  maxPhi_);
529  //-------------------------------------------------------------------------------------------------------------------------
530  denom_simcluster_phi_perlayer[ilayer] = ibook.book1D("Denom_SimCluster_Phi_perlayer" + istr1,
531  "Denom SimCluster Phi per Layer Cluster for layer " + istr2,
532  nintPhi_,
533  minPhi_,
534  maxPhi_);
535  //-------------------------------------------------------------------------------------------------------------------------
536  num_layercl_in_simcl_eta_perlayer[ilayer] =
537  ibook.book1D("Num_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
538  "Num LayerCluster Eta per Layer Cluster in SimCluster for layer " + istr2,
539  nintEta_,
540  minEta_,
541  maxEta_);
542  //-------------------------------------------------------------------------------------------------------------------------
543  numMerge_layercl_in_simcl_eta_perlayer[ilayer] =
544  ibook.book1D("NumMerge_LayerCluster_in_SimCluster_Eta_perlayer" + istr1,
545  "Num Merge LayerCluster Eta per Layer Cluster in SimCluster for layer " + istr2,
546  nintEta_,
547  minEta_,
548  maxEta_);
549  //-------------------------------------------------------------------------------------------------------------------------
550  num_layercl_in_simcl_phi_perlayer[ilayer] =
551  ibook.book1D("Num_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
552  "Num LayerCluster Phi per Layer Cluster in SimCluster for layer " + istr2,
553  nintPhi_,
554  minPhi_,
555  maxPhi_);
556  //-------------------------------------------------------------------------------------------------------------------------
557  numMerge_layercl_in_simcl_phi_perlayer[ilayer] =
558  ibook.book1D("NumMerge_LayerCluster_in_SimCluster_Phi_perlayer" + istr1,
559  "Num Merge LayerCluster Phi per Layer Cluster in SimCluster for layer " + istr2,
560  nintPhi_,
561  minPhi_,
562  maxPhi_);
563 
564  } //end of loop over layers
565 
566  histograms.h_denom_layercl_in_simcl_eta_perlayer.push_back(std::move(denom_layercl_in_simcl_eta_perlayer));
567  histograms.h_denom_layercl_in_simcl_phi_perlayer.push_back(std::move(denom_layercl_in_simcl_phi_perlayer));
568  histograms.h_score_layercl2simcluster_perlayer.push_back(std::move(score_layercl2simcluster_perlayer));
569  histograms.h_sharedenergy_layercl2simcluster_perlayer.push_back(std::move(sharedenergy_layercl2simcluster_perlayer));
570  histograms.h_energy_vs_score_layercl2simcluster_perlayer.push_back(
571  std::move(energy_vs_score_layercl2simcluster_perlayer));
572  histograms.h_num_layercl_in_simcl_eta_perlayer.push_back(std::move(num_layercl_in_simcl_eta_perlayer));
573  histograms.h_num_layercl_in_simcl_phi_perlayer.push_back(std::move(num_layercl_in_simcl_phi_perlayer));
574  histograms.h_numMerge_layercl_in_simcl_eta_perlayer.push_back(std::move(numMerge_layercl_in_simcl_eta_perlayer));
575  histograms.h_numMerge_layercl_in_simcl_phi_perlayer.push_back(std::move(numMerge_layercl_in_simcl_phi_perlayer));
576  histograms.h_sharedenergy_layercl2simcluster_vs_eta_perlayer.push_back(
577  std::move(sharedenergy_layercl2simcluster_vs_eta_perlayer));
578  histograms.h_sharedenergy_layercl2simcluster_vs_phi_perlayer.push_back(
579  std::move(sharedenergy_layercl2simcluster_vs_phi_perlayer));
580  histograms.h_denom_simcluster_eta_perlayer.push_back(std::move(denom_simcluster_eta_perlayer));
581  histograms.h_denom_simcluster_phi_perlayer.push_back(std::move(denom_simcluster_phi_perlayer));
582  histograms.h_score_simcluster2layercl_perlayer.push_back(std::move(score_simcluster2layercl_perlayer));
583  histograms.h_sharedenergy_simcluster2layercl_perlayer.push_back(std::move(sharedenergy_simcluster2layercl_perlayer));
584  histograms.h_energy_vs_score_simcluster2layercl_perlayer.push_back(
585  std::move(energy_vs_score_simcluster2layercl_perlayer));
586  histograms.h_num_simcluster_eta_perlayer.push_back(std::move(num_simcluster_eta_perlayer));
587  histograms.h_num_simcluster_phi_perlayer.push_back(std::move(num_simcluster_phi_perlayer));
588  histograms.h_numDup_simcluster_eta_perlayer.push_back(std::move(numDup_simcluster_eta_perlayer));
589  histograms.h_numDup_simcluster_phi_perlayer.push_back(std::move(numDup_simcluster_phi_perlayer));
590  histograms.h_sharedenergy_simcluster2layercl_vs_eta_perlayer.push_back(
591  std::move(sharedenergy_simcluster2layercl_vs_eta_perlayer));
592  histograms.h_sharedenergy_simcluster2layercl_vs_phi_perlayer.push_back(
593  std::move(sharedenergy_simcluster2layercl_vs_phi_perlayer));
594 }
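//The Num/Denom (and NumDup/NumMerge) histograms booked above only collect raw
//counts; they are meant to be divided downstream, in the DQM harvesting step,
//to obtain per-layer efficiency, duplicate, fake and merge rates versus eta
//and phi.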
597  unsigned int layers,
598  std::vector<int> thicknesses,
599  std::string pathtomatbudfile) {
600  //---------------------------------------------------------------------------------------------------------------------------
601  histograms.h_cluster_eta.push_back(
602  ibook.book1D("num_reco_cluster_eta", "N of reco clusters vs eta", nintEta_, minEta_, maxEta_));
603 
604  //---------------------------------------------------------------------------------------------------------------------------
605  //z-
606  histograms.h_mixedhitscluster_zminus.push_back(
607  ibook.book1D("mixedhitscluster_zminus",
608  "N of reco clusters that contain hits of more than one kind in z-",
609  nintMixedHitsCluster_,
610  minMixedHitsCluster_,
611  maxMixedHitsCluster_));
612  //z+
613  histograms.h_mixedhitscluster_zplus.push_back(
614  ibook.book1D("mixedhitscluster_zplus",
615  "N of reco clusters that contain hits of more than one kind in z+",
616  nintMixedHitsCluster_,
617  minMixedHitsCluster_,
618  maxMixedHitsCluster_));
619 
620  //---------------------------------------------------------------------------------------------------------------------------
621  //z-
622  histograms.h_energyclustered_zminus.push_back(
623  ibook.book1D("energyclustered_zminus",
624  "percent of total energy clustered by all layer clusters over CaloParticles energy in z-",
625  nintEneCl_,
626  minEneCl_,
627  maxEneCl_));
628  //z+
629  histograms.h_energyclustered_zplus.push_back(
630  ibook.book1D("energyclustered_zplus",
631  "percent of total energy clustered by all layer clusters over CaloParticles energy in z+",
632  nintEneCl_,
633  minEneCl_,
634  maxEneCl_));
635 
636  //---------------------------------------------------------------------------------------------------------------------------
637  //z-
638  std::string subpathtomat = pathtomatbudfile.substr(pathtomatbudfile.find("Validation"));
639  histograms.h_longdepthbarycentre_zminus.push_back(
640  ibook.book1D("longdepthbarycentre_zminus",
641  "The longitudinal depth barycentre in z- for " + subpathtomat,
642  nintLongDepBary_,
643  minLongDepBary_,
644  maxLongDepBary_));
645  //z+
646  histograms.h_longdepthbarycentre_zplus.push_back(
647  ibook.book1D("longdepthbarycentre_zplus",
648  "The longitudinal depth barycentre in z+ for " + subpathtomat,
649  nintLongDepBary_,
650  minLongDepBary_,
651  maxLongDepBary_));
652 
653  //---------------------------------------------------------------------------------------------------------------------------
654  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
655  auto istr1 = std::to_string(ilayer);
656  while (istr1.size() < 2) {
657  istr1.insert(0, "0");
658  }
659  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
660  std::string istr2 = "";
661  //First with the -z endcap
662  if (ilayer < layers) {
663  istr2 = std::to_string(ilayer + 1) + " in z-";
664  } else { //Then for the +z
665  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
666  }
667  histograms.h_clusternum_perlayer[ilayer] = ibook.book1D("totclusternum_layer_" + istr1,
668  "total number of layer clusters for layer " + istr2,
669  nintTotNClsperlay_,
670  minTotNClsperlay_,
671  maxTotNClsperlay_);
672  histograms.h_energyclustered_perlayer[ilayer] = ibook.book1D(
673  "energyclustered_perlayer" + istr1,
674  "percent of total energy clustered by layer clusters over CaloParticles energy for layer " + istr2,
675  nintEneClperlay_,
676  minEneClperlay_,
677  maxEneClperlay_);
678  }
679 
680  //---------------------------------------------------------------------------------------------------------------------------
681  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
682  auto istr = std::to_string(*it);
683  histograms.h_clusternum_perthick[(*it)] = ibook.book1D("totclusternum_thick_" + istr,
684  "total number of layer clusters for thickness " + istr,
685  nintTotNClsperthick_,
686  minTotNClsperthick_,
687  maxTotNClsperthick_);
688  }
689  //---------------------------------------------------------------------------------------------------------------------------
690 }
691 
694  unsigned int layers) {
695  //----------------------------------------------------------------------------------------------------------------------------
696  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
697  auto istr1 = std::to_string(ilayer);
698  while (istr1.size() < 2) {
699  istr1.insert(0, "0");
700  }
701  //We will make a mapping to the regural layer naming plus z- or z+ for convenience
702  std::string istr2 = "";
703  //First with the -z endcap
704  if (ilayer < layers) {
705  istr2 = std::to_string(ilayer + 1) + " in z-";
706  } else { //Then for the +z
707  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
708  }
709  histograms.h_score_layercl2caloparticle_perlayer[ilayer] =
710  ibook.book1D("Score_layercl2caloparticle_perlayer" + istr1,
711  "Score of Layer Cluster per CaloParticle for layer " + istr2,
712  nintScore_,
713  minScore_,
714  maxScore_);
715  histograms.h_score_caloparticle2layercl_perlayer[ilayer] =
716  ibook.book1D("Score_caloparticle2layercl_perlayer" + istr1,
717  "Score of CaloParticle per Layer Cluster for layer " + istr2,
718  nintScore_,
719  minScore_,
720  maxScore_);
721  histograms.h_energy_vs_score_caloparticle2layercl_perlayer[ilayer] =
722  ibook.book2D("Energy_vs_Score_caloparticle2layer_perlayer" + istr1,
723  "Energy vs Score of CaloParticle per Layer Cluster for layer " + istr2,
724  nintScore_,
725  minScore_,
726  maxScore_,
727  nintSharedEneFrac_,
728  minSharedEneFrac_,
729  maxSharedEneFrac_);
730  histograms.h_energy_vs_score_layercl2caloparticle_perlayer[ilayer] =
731  ibook.book2D("Energy_vs_Score_layer2caloparticle_perlayer" + istr1,
732  "Energy vs Score of Layer Cluster per CaloParticle Layer for layer " + istr2,
733  nintScore_,
734  minScore_,
735  maxScore_,
736  nintSharedEneFrac_,
737  minSharedEneFrac_,
738  maxSharedEneFrac_);
739  histograms.h_sharedenergy_caloparticle2layercl_perlayer[ilayer] =
740  ibook.book1D("SharedEnergy_caloparticle2layercl_perlayer" + istr1,
741  "Shared Energy of CaloParticle per Layer Cluster for layer " + istr2,
742  nintSharedEneFrac_,
743  minSharedEneFrac_,
744  maxSharedEneFrac_);
745  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer[ilayer] =
746  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_eta_perlayer" + istr1,
747  "Shared Energy of CaloParticle vs #eta per best Layer Cluster for layer " + istr2,
748  nintEta_,
749  minEta_,
750  maxEta_,
751  minSharedEneFrac_,
752  maxSharedEneFrac_);
753  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer[ilayer] =
754  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_phi_perlayer" + istr1,
755  "Shared Energy of CaloParticle vs #phi per best Layer Cluster for layer " + istr2,
756  nintPhi_,
757  minPhi_,
758  maxPhi_,
759  minSharedEneFrac_,
760  maxSharedEneFrac_);
761  histograms.h_sharedenergy_layercl2caloparticle_perlayer[ilayer] =
762  ibook.book1D("SharedEnergy_layercluster2caloparticle_perlayer" + istr1,
763  "Shared Energy of Layer Cluster per Layer Calo Particle for layer " + istr2,
764  nintSharedEneFrac_,
765  minSharedEneFrac_,
766  maxSharedEneFrac_);
767  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer[ilayer] =
768  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_eta_perlayer" + istr1,
769  "Shared Energy of LayerCluster vs #eta per best Calo Particle for layer " + istr2,
770  nintEta_,
771  minEta_,
772  maxEta_,
773  minSharedEneFrac_,
774  maxSharedEneFrac_);
775  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer[ilayer] =
776  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_phi_perlayer" + istr1,
777  "Shared Energy of LayerCluster vs #phi per best Calo Particle for layer " + istr2,
778  nintPhi_,
779  minPhi_,
780  maxPhi_,
781  minSharedEneFrac_,
782  maxSharedEneFrac_);
783  histograms.h_num_caloparticle_eta_perlayer[ilayer] =
784  ibook.book1D("Num_CaloParticle_Eta_perlayer" + istr1,
785  "Num CaloParticle Eta per Layer Cluster for layer " + istr2,
786  nintEta_,
787  minEta_,
788  maxEta_);
789  histograms.h_numDup_caloparticle_eta_perlayer[ilayer] =
790  ibook.book1D("NumDup_CaloParticle_Eta_perlayer" + istr1,
791  "Num Duplicate CaloParticle Eta per Layer Cluster for layer " + istr2,
792  nintEta_,
793  minEta_,
794  maxEta_);
795  histograms.h_denom_caloparticle_eta_perlayer[ilayer] =
796  ibook.book1D("Denom_CaloParticle_Eta_perlayer" + istr1,
797  "Denom CaloParticle Eta per Layer Cluster for layer " + istr2,
798  nintEta_,
799  minEta_,
800  maxEta_);
801  histograms.h_num_caloparticle_phi_perlayer[ilayer] =
802  ibook.book1D("Num_CaloParticle_Phi_perlayer" + istr1,
803  "Num CaloParticle Phi per Layer Cluster for layer " + istr2,
804  nintPhi_,
805  minPhi_,
806  maxPhi_);
807  histograms.h_numDup_caloparticle_phi_perlayer[ilayer] =
808  ibook.book1D("NumDup_CaloParticle_Phi_perlayer" + istr1,
809  "Num Duplicate CaloParticle Phi per Layer Cluster for layer " + istr2,
810  nintPhi_,
811  minPhi_,
812  maxPhi_);
813  histograms.h_denom_caloparticle_phi_perlayer[ilayer] =
814  ibook.book1D("Denom_CaloParticle_Phi_perlayer" + istr1,
815  "Denom CaloParticle Phi per Layer Cluster for layer " + istr2,
816  nintPhi_,
817  minPhi_,
818  maxPhi_);
819  histograms.h_num_layercl_eta_perlayer[ilayer] =
820  ibook.book1D("Num_LayerCluster_Eta_perlayer" + istr1,
821  "Num LayerCluster Eta per Layer Cluster for layer " + istr2,
822  nintEta_,
823  minEta_,
824  maxEta_);
825  histograms.h_numMerge_layercl_eta_perlayer[ilayer] =
826  ibook.book1D("NumMerge_LayerCluster_Eta_perlayer" + istr1,
827  "Num Merge LayerCluster Eta per Layer Cluster for layer " + istr2,
828  nintEta_,
829  minEta_,
830  maxEta_);
831  histograms.h_denom_layercl_eta_perlayer[ilayer] =
832  ibook.book1D("Denom_LayerCluster_Eta_perlayer" + istr1,
833  "Denom LayerCluster Eta per Layer Cluster for layer " + istr2,
834  nintEta_,
835  minEta_,
836  maxEta_);
837  histograms.h_num_layercl_phi_perlayer[ilayer] =
838  ibook.book1D("Num_LayerCluster_Phi_perlayer" + istr1,
839  "Num LayerCluster Phi per Layer Cluster for layer " + istr2,
840  nintPhi_,
841  minPhi_,
842  maxPhi_);
843  histograms.h_numMerge_layercl_phi_perlayer[ilayer] =
844  ibook.book1D("NumMerge_LayerCluster_Phi_perlayer" + istr1,
845  "Num Merge LayerCluster Phi per Layer Cluster for layer " + istr2,
846  nintPhi_,
847  minPhi_,
848  maxPhi_);
849  histograms.h_denom_layercl_phi_perlayer[ilayer] =
850  ibook.book1D("Denom_LayerCluster_Phi_perlayer" + istr1,
851  "Denom LayerCluster Phi per Layer Cluster for layer " + istr2,
852  nintPhi_,
853  minPhi_,
854  maxPhi_);
855  }
856  //---------------------------------------------------------------------------------------------------------------------------
857 }
858 
861  unsigned int layers,
862  std::vector<int> thicknesses) {
863  //----------------------------------------------------------------------------------------------------------------------------
864  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
865  auto istr1 = std::to_string(ilayer);
866  while (istr1.size() < 2) {
867  istr1.insert(0, "0");
868  }
869  //We will make a mapping to the regural layer naming plus z- or z+ for convenience
870  std::string istr2 = "";
871  //First with the -z endcap
872  if (ilayer < layers) {
873  istr2 = std::to_string(ilayer + 1) + " in z-";
874  } else { //Then for the +z
875  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
876  }
877  histograms.h_cellAssociation_perlayer[ilayer] =
878  ibook.book1D("cellAssociation_perlayer" + istr1, "Cell Association for layer " + istr2, 5, -4., 1.);
879  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(2, "TN(purity)");
880  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(3, "FN(ineff.)");
881  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(4, "FP(fake)");
882  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(5, "TP(eff.)");
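//The association axis spans [-4, 1) in five bins; bins 2-5 are labelled above
//as true negative (purity), false negative (inefficiency), false positive
//(fake) and true positive (efficiency), so the fill value encodes the
//cell-level association category.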
883  }
884  //----------------------------------------------------------------------------------------------------------------------------
885  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
886  auto istr = std::to_string(*it);
887  histograms.h_cellsenedens_perthick[(*it)] = ibook.book1D("cellsenedens_thick_" + istr,
888  "energy density of cluster cells for thickness " + istr,
889  nintCellsEneDensperthick_,
890  minCellsEneDensperthick_,
891  maxCellsEneDensperthick_);
892  }
893  //----------------------------------------------------------------------------------------------------------------------------
894  //Not all combinations exist, but we keep them all for cross-checking reasons.
895  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
896  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
897  auto istr1 = std::to_string(*it);
898  auto istr2 = std::to_string(ilayer);
899  while (istr2.size() < 2)
900  istr2.insert(0, "0");
901  auto istr = istr1 + "_" + istr2;
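//e.g. thickness 120 together with ilayer 7 (the 8th layer in z-) gives the key "120_07"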
902  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
903  std::string istr3 = "";
904  //First with the -z endcap
905  if (ilayer < layers) {
906  istr3 = std::to_string(ilayer + 1) + " in z- ";
907  } else { //Then for the +z
908  istr3 = std::to_string(ilayer - (layers - 1)) + " in z+ ";
909  }
910  //---
911  histograms.h_cellsnum_perthickperlayer[istr] =
912  ibook.book1D("cellsnum_perthick_perlayer_" + istr,
913  "total number of cells for layer " + istr3 + " for thickness " + istr1,
914  nintTotNcellsperthickperlayer_,
915  minTotNcellsperthickperlayer_,
916  maxTotNcellsperthickperlayer_);
917  //---
918  histograms.h_distancetoseedcell_perthickperlayer[istr] =
919  ibook.book1D("distancetoseedcell_perthickperlayer_" + istr,
920  "distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
921  nintDisToSeedperthickperlayer_,
922  minDisToSeedperthickperlayer_,
923  maxDisToSeedperthickperlayer_);
924  //---
925  histograms.h_distancetoseedcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
926  "distancetoseedcell_perthickperlayer_eneweighted_" + istr,
927  "energy weighted distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
928  nintDisToSeedperthickperlayerenewei_,
929  minDisToSeedperthickperlayerenewei_,
930  maxDisToSeedperthickperlayerenewei_);
931  //---
932  histograms.h_distancetomaxcell_perthickperlayer[istr] =
933  ibook.book1D("distancetomaxcell_perthickperlayer_" + istr,
934  "distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
935  nintDisToMaxperthickperlayer_,
936  minDisToMaxperthickperlayer_,
937  maxDisToMaxperthickperlayer_);
938  //---
939  histograms.h_distancetomaxcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
940  "distancetomaxcell_perthickperlayer_eneweighted_" + istr,
941  "energy weighted distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
942  nintDisToMaxperthickperlayerenewei_,
943  minDisToMaxperthickperlayerenewei_,
944  maxDisToMaxperthickperlayerenewei_);
945  //---
946  histograms.h_distancebetseedandmaxcell_perthickperlayer[istr] =
947  ibook.book1D("distancebetseedandmaxcell_perthickperlayer_" + istr,
948  "distance of seed cell to max cell for layer " + istr3 + " for thickness " + istr1,
949  nintDisSeedToMaxperthickperlayer_,
950  minDisSeedToMaxperthickperlayer_,
951  maxDisSeedToMaxperthickperlayer_);
952  //---
953  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer[istr] = ibook.book2D(
954  "distancebetseedandmaxcellvsclusterenergy_perthickperlayer_" + istr,
955  "distance of seed cell to max cell vs cluster energy for layer " + istr3 + " for thickness " + istr1,
956  nintDisSeedToMaxperthickperlayer_,
957  minDisSeedToMaxperthickperlayer_,
958  maxDisSeedToMaxperthickperlayer_,
959  nintClEneperthickperlayer_,
960  minClEneperthickperlayer_,
961  maxClEneperthickperlayer_);
962  }
963  }
964 }
965 //----------------------------------------------------------------------------------------------------------------------------
966 
968  histograms.h_score_trackster2caloparticle.push_back(ibook.book1D(
969  "Score_trackster2caloparticle", "Score of Trackster per CaloParticle", nintScore_, minScore_, maxScore_));
970  histograms.h_score_caloparticle2trackster.push_back(ibook.book1D(
971  "Score_caloparticle2trackster", "Score of CaloParticle per Trackster", nintScore_, minScore_, maxScore_));
972  histograms.h_energy_vs_score_trackster2caloparticle.push_back(
973  ibook.book2D("Energy_vs_Score_trackster2caloparticle",
974  "Energy vs Score of Trackster per CaloParticle",
975  nintScore_,
976  minScore_,
977  maxScore_,
981  histograms.h_energy_vs_score_caloparticle2trackster.push_back(
982  ibook.book2D("Energy_vs_Score_caloparticle2trackster",
983  "Energy vs Score of CaloParticle per Trackster",
984  nintScore_,
985  minScore_,
986  maxScore_,
990 
991  //back to all Tracksters
992  histograms.h_num_trackster_eta.push_back(
993  ibook.book1D("Num_Trackster_Eta", "Num Trackster Eta per Trackster ", nintEta_, minEta_, maxEta_));
994  histograms.h_numMerge_trackster_eta.push_back(
995  ibook.book1D("NumMerge_Trackster_Eta", "Num Merge Trackster Eta per Trackster ", nintEta_, minEta_, maxEta_));
996  histograms.h_denom_trackster_eta.push_back(
997  ibook.book1D("Denom_Trackster_Eta", "Denom Trackster Eta per Trackster", nintEta_, minEta_, maxEta_));
998  histograms.h_num_trackster_phi.push_back(
999  ibook.book1D("Num_Trackster_Phi", "Num Trackster Phi per Trackster ", nintPhi_, minPhi_, maxPhi_));
1000  histograms.h_numMerge_trackster_phi.push_back(
1001  ibook.book1D("NumMerge_Trackster_Phi", "Num Merge Trackster Phi per Trackster", nintPhi_, minPhi_, maxPhi_));
1002  histograms.h_denom_trackster_phi.push_back(
1003  ibook.book1D("Denom_Trackster_Phi", "Denom Trackster Phi per Trackster", nintPhi_, minPhi_, maxPhi_));
1004  histograms.h_sharedenergy_trackster2caloparticle.push_back(
1005  ibook.book1D("SharedEnergy_trackster2caloparticle",
1006  "Shared Energy of Trackster per Calo Particle in each layer",
1010  histograms.h_sharedenergy_trackster2caloparticle_vs_eta.push_back(
1011  ibook.bookProfile("SharedEnergy_trackster2caloparticle_vs_eta",
1012  "Shared Energy of Trackster vs #eta per best Calo Particle in each layer",
1013  nintEta_,
1014  minEta_,
1015  maxEta_,
1018  histograms.h_sharedenergy_trackster2caloparticle_vs_phi.push_back(
1019  ibook.bookProfile("SharedEnergy_trackster2caloparticle_vs_phi",
1020  "Shared Energy of Trackster vs #phi per best Calo Particle in each layer",
1021  nintPhi_,
1022  minPhi_,
1023  maxPhi_,
1026  histograms.h_sharedenergy_caloparticle2trackster.push_back(ibook.book1D("SharedEnergy_caloparticle2trackster",
1027  "Shared Energy of CaloParticle per Trackster",
1031  histograms.h_sharedenergy_caloparticle2trackster_assoc.push_back(
1032  ibook.book1D("SharedEnergy_caloparticle2trackster_assoc",
1033  "Shared Energy of Associated CaloParticle per Trackster",
1037  histograms.h_sharedenergy_caloparticle2trackster_vs_eta.push_back(
1038  ibook.bookProfile("SharedEnergy_caloparticle2trackster_vs_eta",
1039  "Shared Energy of CaloParticle vs #eta per best Trackster",
1040  nintEta_,
1041  minEta_,
1042  maxEta_,
1045  histograms.h_sharedenergy_caloparticle2trackster_vs_phi.push_back(
1046  ibook.bookProfile("SharedEnergy_caloparticle2trackster_vs_phi",
1047  "Shared Energy of CaloParticle vs #phi per best Trackster",
1048  nintPhi_,
1049  minPhi_,
1050  maxPhi_,
1053  histograms.h_numEff_caloparticle_eta.push_back(ibook.book1D(
1054  "NumEff_CaloParticle_Eta", "Num Efficiency CaloParticle Eta per Trackster", nintEta_, minEta_, maxEta_));
1055  histograms.h_num_caloparticle_eta.push_back(
1056  ibook.book1D("Num_CaloParticle_Eta", "Num Purity CaloParticle Eta per Trackster", nintEta_, minEta_, maxEta_));
1057  histograms.h_numDup_trackster_eta.push_back(
1058  ibook.book1D("NumDup_Trackster_Eta", "Num Duplicate Trackster vs Eta", nintEta_, minEta_, maxEta_));
1059  histograms.h_denom_caloparticle_eta.push_back(
1060  ibook.book1D("Denom_CaloParticle_Eta", "Denom CaloParticle Eta per Trackster", nintEta_, minEta_, maxEta_));
1061  histograms.h_numEff_caloparticle_phi.push_back(ibook.book1D(
1062  "NumEff_CaloParticle_Phi", "Num Efficiency CaloParticle Phi per Trackster", nintPhi_, minPhi_, maxPhi_));
1063  histograms.h_num_caloparticle_phi.push_back(
1064  ibook.book1D("Num_CaloParticle_Phi", "Num Purity CaloParticle Phi per Trackster", nintPhi_, minPhi_, maxPhi_));
1065  histograms.h_numDup_trackster_phi.push_back(
1066  ibook.book1D("NumDup_Trackster_Phi", "Num Duplicate Trackster vs Phi", nintPhi_, minPhi_, maxPhi_));
1067  histograms.h_denom_caloparticle_phi.push_back(
1068  ibook.book1D("Denom_CaloParticle_Phi", "Denom CaloParticle Phi per Trackster", nintPhi_, minPhi_, maxPhi_));
1069 
1070  std::unordered_map<int, dqm::reco::MonitorElement*> clusternum_in_trackster_perlayer;
1071  clusternum_in_trackster_perlayer.clear();
1072 
1073  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
1074  auto istr1 = std::to_string(ilayer);
1075  while (istr1.size() < 2) {
1076  istr1.insert(0, "0");
1077  }
1078  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
1079  std::string istr2 = "";
1080  //First with the -z endcap
1081  if (ilayer < layers) {
1082  istr2 = std::to_string(ilayer + 1) + " in z-";
1083  } else { //Then for the +z
1084  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
1085  }
1086 
1087  clusternum_in_trackster_perlayer[ilayer] = ibook.book1D("clusternum_in_trackster_perlayer" + istr1,
1088  "Number of layer clusters in Trackster for layer " + istr2,
1089  nintTotNClsinTSTsperlayer_,
1090  minTotNClsinTSTsperlayer_,
1091  maxTotNClsinTSTsperlayer_);
1092  }
1093 
1094  histograms.h_clusternum_in_trackster_perlayer.push_back(std::move(clusternum_in_trackster_perlayer));
1095 
1096  histograms.h_tracksternum.push_back(
1097  ibook.book1D("tottracksternum", "total number of Tracksters", nintTotNTSTs_, minTotNTSTs_, maxTotNTSTs_));
1098 
1099  histograms.h_conttracksternum.push_back(ibook.book1D(
1100  "conttracksternum", "number of Tracksters with 3 contiguous layers", nintTotNTSTs_, minTotNTSTs_, maxTotNTSTs_));
1101 
1102  histograms.h_nonconttracksternum.push_back(ibook.book1D("nonconttracksternum",
1103  "number of Tracksters without 3 contiguous layers",
1104  nintTotNTSTs_,
1105  minTotNTSTs_,
1106  maxTotNTSTs_));
1107 
1108  histograms.h_clusternum_in_trackster.push_back(ibook.book1D("clusternum_in_trackster",
1109  "total number of layer clusters in Trackster",
1110  nintTotNClsinTSTs_,
1111  minTotNClsinTSTs_,
1112  maxTotNClsinTSTs_));
1113 
1114  histograms.h_clusternum_in_trackster_vs_layer.push_back(
1115  ibook.bookProfile("clusternum_in_trackster_vs_layer",
1116  "Profile of 2d layer clusters in Trackster vs layer number",
1117  2 * layers,
1118  0.,
1119  2. * layers,
1120  minTotNClsinTSTsperlayer_,
1121  maxTotNClsinTSTsperlayer_));
1122 
1123  histograms.h_multiplicityOfLCinTST.push_back(ibook.book2D("multiplicityOfLCinTST",
1124  "Multiplicity vs Layer cluster size in Tracksters",
1125  nintMplofLCs_,
1126  minMplofLCs_,
1127  maxMplofLCs_,
1128  nintSizeCLsinTSTs_,
1129  minSizeCLsinTSTs_,
1130  maxSizeCLsinTSTs_));
1131 
1132  histograms.h_multiplicity_numberOfEventsHistogram.push_back(ibook.book1D("multiplicity_numberOfEventsHistogram",
1133  "multiplicity numberOfEventsHistogram",
1134  nintMplofLCs_,
1135  minMplofLCs_,
1136  maxMplofLCs_));
1137 
1138  histograms.h_multiplicity_zminus_numberOfEventsHistogram.push_back(
1139  ibook.book1D("multiplicity_zminus_numberOfEventsHistogram",
1140  "multiplicity numberOfEventsHistogram in z-",
1141  nintMplofLCs_,
1142  minMplofLCs_,
1143  maxMplofLCs_));
1144 
1145  histograms.h_multiplicity_zplus_numberOfEventsHistogram.push_back(
1146  ibook.book1D("multiplicity_zplus_numberOfEventsHistogram",
1147  "multiplicity numberOfEventsHistogram in z+",
1148  nintMplofLCs_,
1149  minMplofLCs_,
1150  maxMplofLCs_));
1151 
1152  histograms.h_multiplicityOfLCinTST_vs_layercluster_zminus.push_back(
1153  ibook.book2D("multiplicityOfLCinTST_vs_layercluster_zminus",
1154  "Multiplicity vs Layer number in z-",
1155  nintMplofLCs_,
1156  minMplofLCs_,
1157  maxMplofLCs_,
1158  layers,
1159  0.,
1160  (float)layers));
1161 
1162  histograms.h_multiplicityOfLCinTST_vs_layercluster_zplus.push_back(
1163  ibook.book2D("multiplicityOfLCinTST_vs_layercluster_zplus",
1164  "Multiplicity vs Layer number in z+",
1165  nintMplofLCs_,
1166  minMplofLCs_,
1167  maxMplofLCs_,
1168  layers,
1169  0.,
1170  (float)layers));
1171 
1172  histograms.h_multiplicityOfLCinTST_vs_layerclusterenergy.push_back(
1173  ibook.book2D("multiplicityOfLCinTST_vs_layerclusterenergy",
1174  "Multiplicity vs Layer cluster energy",
1175  nintMplofLCs_,
1176  minMplofLCs_,
1177  maxMplofLCs_,
1178  nintClEnepermultiplicity_,
1179  minClEnepermultiplicity_,
1180  maxClEnepermultiplicity_));
1181 
1182  histograms.h_trackster_pt.push_back(ibook.book1D("trackster_pt", "Pt of the Trackster", nintPt_, minPt_, maxPt_));
1183  histograms.h_trackster_eta.push_back(
1184  ibook.book1D("trackster_eta", "Eta of the Trackster", nintEta_, minEta_, maxEta_));
1185  histograms.h_trackster_phi.push_back(
1186  ibook.book1D("trackster_phi", "Phi of the Trackster", nintPhi_, minPhi_, maxPhi_));
1187  histograms.h_trackster_energy.push_back(
1188  ibook.book1D("trackster_energy", "Energy of the Trackster", nintEne_, minEne_, maxEne_));
1189  histograms.h_trackster_x.push_back(ibook.book1D("trackster_x", "X position of the Trackster", nintX_, minX_, maxX_));
1190  histograms.h_trackster_y.push_back(ibook.book1D("trackster_y", "Y position of the Trackster", nintY_, minY_, maxY_));
1191  histograms.h_trackster_z.push_back(ibook.book1D("trackster_z", "Z position of the Trackster", nintZ_, minZ_, maxZ_));
1192  histograms.h_trackster_firstlayer.push_back(
1193  ibook.book1D("trackster_firstlayer", "First layer of the Trackster", 2 * layers, 0., (float)2 * layers));
1194  histograms.h_trackster_lastlayer.push_back(
1195  ibook.book1D("trackster_lastlayer", "Last layer of the Trackster", 2 * layers, 0., (float)2 * layers));
1196  histograms.h_trackster_layersnum.push_back(
1197  ibook.book1D("trackster_layersnum", "Number of layers of the Trackster", 2 * layers, 0., (float)2 * layers));
1198 }
1199 
1201  //We will save some info straight from geometry to avoid mistakes from updates
1202  //----------- TODO ----------------------------------------------------------
1203  //For now the values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the ones expected.
1204  //Will come back to this when there is info in CMSSW to put in the DQM file.
1205  histograms.lastLayerEEzm->Fill(recHitTools_->lastLayerEE());
1206  histograms.lastLayerFHzm->Fill(recHitTools_->lastLayerFH());
1207  histograms.maxlayerzm->Fill(layers);
1208  histograms.lastLayerEEzp->Fill(recHitTools_->lastLayerEE() + layers);
1209  histograms.lastLayerFHzp->Fill(recHitTools_->lastLayerFH() + layers);
1210  histograms.maxlayerzp->Fill(layers + layers);
1211 }
1212 
1214  int pdgid,
1215  const CaloParticle& caloParticle,
1216  std::vector<SimVertex> const& simVertices,
1217  unsigned int layers,
1218  std::unordered_map<DetId, const HGCRecHit*> const& hitMap) const {
1219  const auto eta = getEta(caloParticle.eta());
1220  if (histograms.h_caloparticle_eta.count(pdgid)) {
1221  histograms.h_caloparticle_eta.at(pdgid)->Fill(eta);
1222  }
1223  if (histograms.h_caloparticle_eta_Zorigin.count(pdgid)) {
1224  histograms.h_caloparticle_eta_Zorigin.at(pdgid)->Fill(
1225  simVertices.at(caloParticle.g4Tracks()[0].vertIndex()).position().z(), eta);
1226  }
1227 
1228  if (histograms.h_caloparticle_energy.count(pdgid)) {
1229  histograms.h_caloparticle_energy.at(pdgid)->Fill(caloParticle.energy());
1230  }
1231  if (histograms.h_caloparticle_pt.count(pdgid)) {
1232  histograms.h_caloparticle_pt.at(pdgid)->Fill(caloParticle.pt());
1233  }
1234  if (histograms.h_caloparticle_phi.count(pdgid)) {
1235  histograms.h_caloparticle_phi.at(pdgid)->Fill(caloParticle.phi());
1236  }
1237 
1238  if (histograms.h_caloparticle_nSimClusters.count(pdgid)) {
1239  histograms.h_caloparticle_nSimClusters.at(pdgid)->Fill(caloParticle.simClusters().size());
1240 
1241  int simHits = 0;
1242  int minLayerId = 999;
1243  int maxLayerId = 0;
1244 
1245  int simHits_matched = 0;
1246  int minLayerId_matched = 999;
1247  int maxLayerId_matched = 0;
1248 
1249  float energy = 0.;
1250  std::map<int, double> totenergy_layer;
1251 
1252  for (auto const& sc : caloParticle.simClusters()) {
1253  LogDebug("HGCalValidator") << " This sim cluster has " << sc->hits_and_fractions().size() << " simHits and "
1254  << sc->energy() << " energy. " << std::endl;
1255  simHits += sc->hits_and_fractions().size();
1256  for (auto const& h_and_f : sc->hits_and_fractions()) {
1257  const auto hitDetId = h_and_f.first;
1258  int layerId =
1259  recHitTools_->getLayerWithOffset(hitDetId) + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1;
1260  // sentinel values, left unchanged if no matched RecHit is found
1261  int layerId_matched_min = 999;
1262  int layerId_matched_max = 0;
1263  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitDetId);
1264  if (itcheck != hitMap.end()) {
1265  layerId_matched_min = layerId;
1266  layerId_matched_max = layerId;
1267  simHits_matched++;
1268 
1269  const HGCRecHit* hit = itcheck->second;
1270  energy += hit->energy() * h_and_f.second;
1271  histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy() * h_and_f.second);
1272  histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy() * h_and_f.second);
1273 
1274  if (totenergy_layer.find(layerId) != totenergy_layer.end()) {
1275  totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy();
1276  } else {
1277  totenergy_layer.emplace(layerId, hit->energy());
1278  }
1279  if (caloParticle.simClusters().size() == 1)
1280  histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl.at(pdgid)->Fill(layerId,
1281  hit->energy() * h_and_f.second);
1282  } else {
1283  LogDebug("HGCalValidator") << " matched to RecHit NOT found !" << std::endl;
1284  }
1285 
1286  minLayerId = std::min(minLayerId, layerId);
1287  maxLayerId = std::max(maxLayerId, layerId);
1288  minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min);
1289  maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max);
1290  }
1291  LogDebug("HGCalValidator") << std::endl;
1292  }
1293  histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId);
1294  histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId);
1295  histograms.h_caloparticle_layersnum.at(pdgid)->Fill(int(maxLayerId - minLayerId));
1296 
1297  histograms.h_caloparticle_firstlayer_matchedtoRecHit.at(pdgid)->Fill(minLayerId_matched);
1298  histograms.h_caloparticle_lastlayer_matchedtoRecHit.at(pdgid)->Fill(maxLayerId_matched);
1299  histograms.h_caloparticle_layersnum_matchedtoRecHit.at(pdgid)->Fill(int(maxLayerId_matched - minLayerId_matched));
1300 
1301  histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits);
1302  histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched);
1303  histograms.h_caloparticle_selfenergy.at(pdgid)->Fill((float)energy);
1304  histograms.h_caloparticle_energyDifference.at(pdgid)->Fill((float)1. - energy / caloParticle.energy());
1305 
1306  //Calculate the cumulative energy up to each layer, as a percentage of the CaloParticle energy
1307  auto i = totenergy_layer.begin();
1308  double sum_energy = 0.0;
1309  while (i != totenergy_layer.end()) {
1310  sum_energy += i->second;
1311  histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloParticle.energy() * 100.);
1312  i++;
1313  }
1314  }
1315 }
1316 
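// A minimal illustration of the endcap-folded layer index used throughout this
// file (hypothetical helper, not used elsewhere in this file). zside() returns
// -1 or +1, so ((zside + 1) >> 1) is 0 for -z and 1 for +z, folding both
// endcaps into a single range [0, 2*layers - 1].
inline int globalLayerId(unsigned int layerWithOffset, int zside, unsigned int layers) {
  return layerWithOffset + layers * ((zside + 1) >> 1) - 1;
}
// e.g. with layers = 50: layer 3 at -z -> 3 + 50*0 - 1 = 2,
//                        layer 3 at +z -> 3 + 50*1 - 1 = 52.
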
1317 void HGVHistoProducerAlgo::HGVHistoProducerAlgo::fill_simCluster_histos(const Histograms& histograms,
1318  std::vector<SimCluster> const& simClusters,
1319  unsigned int layers,
1320  std::vector<int> thicknesses) const {
1321  //Each event to be treated as two events: an event in +ve endcap,
1322  //plus another event in -ve endcap. In this spirit there will be
1323  //a layer variable (layerid) that maps the layers in :
1324  //-z: 0->49
1325  //+z: 50->99
1326 
1327  //To keep track of total num of simClusters per layer
1328  //tnscpl[layerid]
1329  std::vector<int> tnscpl(1000, 0); //tnscpl.clear(); tnscpl.reserve(1000);
1330 
1331  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1332  std::map<std::string, int> tnscpthplus;
1333  tnscpthplus.clear();
1334  std::map<std::string, int> tnscpthminus;
1335  tnscpthminus.clear();
1336  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1337  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1338  tnscpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1339  tnscpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1340  }
1341  //To keep track of the total num of simClusters with mixed thickness hits per event
1342  tnscpthplus.insert(std::pair<std::string, int>("mixed", 0));
1343  tnscpthminus.insert(std::pair<std::string, int>("mixed", 0));
1344 
1345  //loop through simClusters
1346  for (unsigned int ic = 0; ic < simClusters.size(); ++ic) {
1347  const auto& sc = simClusters[ic];
1348  const auto& hitsAndFractions = sc.hits_and_fractions();
1349 
1350  //Auxiliary variables to count the number of different kinds of hits in each simCluster
1351  int nthhits120p = 0;
1352  int nthhits200p = 0;
1353  int nthhits300p = 0;
1354  int nthhitsscintp = 0;
1355  int nthhits120m = 0;
1356  int nthhits200m = 0;
1357  int nthhits300m = 0;
1358  int nthhitsscintm = 0;
1359  //For the thickness of the hits of the simCluster.
1360  double thickness = 0.;
1361  //To keep track if we added the simCluster in a specific layer
1362  std::vector<int> occurenceSCinlayer(1000, 0); //[layerid][0 if not added]
1363 
1364  //loop through hits of the simCluster
1365  for (const auto& hAndF : hitsAndFractions) {
1366  const DetId sh_detid = hAndF.first;
1367 
1368  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
1369  int layerid =
1370  recHitTools_->getLayerWithOffset(sh_detid) + layers * ((recHitTools_->zside(sh_detid) + 1) >> 1) - 1;
1371  //zside that the current cluster belongs to.
1372  int zside = recHitTools_->zside(sh_detid);
1373 
1374  //add the simCluster to the relevant layer. A SimCluster may give contribution to several layers.
1375  if (occurenceSCinlayer[layerid] == 0) {
1376  tnscpl[layerid]++;
1377  }
1378  occurenceSCinlayer[layerid]++;
1379 
1380  if (sh_detid.det() == DetId::Forward || sh_detid.det() == DetId::HGCalEE || sh_detid.det() == DetId::HGCalHSi) {
1381  thickness = recHitTools_->getSiThickness(sh_detid);
1382  } else if (sh_detid.det() == DetId::HGCalHSc) {
1383  thickness = -1;
1384  } else {
1385  LogDebug("HGCalValidator") << "These are HGCal simClusters, you shouldn't be here !!! " << layerid << "\n";
1386  continue;
1387  }
1388 
1389  if ((thickness == 120.) && (zside > 0.)) {
1390  nthhits120p++;
1391  } else if ((thickness == 120.) && (zside < 0.)) {
1392  nthhits120m++;
1393  } else if ((thickness == 200.) && (zside > 0.)) {
1394  nthhits200p++;
1395  } else if ((thickness == 200.) && (zside < 0.)) {
1396  nthhits200m++;
1397  } else if ((thickness == 300.) && (zside > 0.)) {
1398  nthhits300p++;
1399  } else if ((thickness == 300.) && (zside < 0.)) {
1400  nthhits300m++;
1401  } else if ((thickness == -1) && (zside > 0.)) {
1402  nthhitsscintp++;
1403  } else if ((thickness == -1) && (zside < 0.)) {
1404  nthhitsscintm++;
1405  } else { //assert(0);
1406  LogDebug("HGCalValidator")
1407  << " You are running a geometry that contains thicknesses different than the normal ones. "
1408  << "\n";
1409  }
1410 
1411  } //end of loop through hits
1412 
1413  //Check for simultaneously having hits of different kind. Checking at least two combinations is sufficient.
1414  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
1415  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
1416  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
1417  tnscpthplus["mixed"]++;
1418  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
1419  //This is a cluster with hits of one kind
1420  tnscpthplus[std::to_string((int)thickness)]++;
1421  }
1422  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
1423  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
1424  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
1425  tnscpthminus["mixed"]++;
1426  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
1427  //This is a cluster with hits of one kind
1428  tnscpthminus[std::to_string((int)thickness)]++;
1429  }
1430 
1431  } //end of loop through SimClusters of the event
1432 
1433  //Per layer : Loop 0->99
1434  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
1435  if (histograms.h_simclusternum_perlayer.count(ilayer)) {
1436  histograms.h_simclusternum_perlayer.at(ilayer)->Fill(tnscpl[ilayer]);
1437  }
1438  } //end of loop through layers
1439 
1440  //Per thickness
1441  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1442  if (histograms.h_simclusternum_perthick.count(*it)) {
1443  histograms.h_simclusternum_perthick.at(*it)->Fill(tnscpthplus[std::to_string(*it)]);
1444  histograms.h_simclusternum_perthick.at(*it)->Fill(tnscpthminus[std::to_string(*it)]);
1445  }
1446  }
1447  //Mixed thickness clusters
1448  histograms.h_mixedhitssimcluster_zplus->Fill(tnscpthplus["mixed"]);
1449  histograms.h_mixedhitssimcluster_zminus->Fill(tnscpthminus["mixed"]);
1450 }
1451 
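// The pairwise checks above are equivalent to asking whether more than one of
// the per-thickness counters is non-zero. A minimal sketch of that condition
// (hypothetical helper, not used elsewhere in this file):
inline bool isMixedThickness(int n120, int n200, int n300, int nScint) {
  const int kinds = (n120 != 0) + (n200 != 0) + (n300 != 0) + (nScint != 0);
  return kinds > 1;  // hits of at least two different thicknesses -> "mixed"
}
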
1452 void HGVHistoProducerAlgo::HGVHistoProducerAlgo::fill_simClusterAssociation_histos(
1453  const Histograms& histograms,
1454  int count,
1455  edm::Handle<reco::CaloClusterCollection> clusterHandle,
1456  const reco::CaloClusterCollection& clusters,
1457  edm::Handle<std::vector<SimCluster>> simClusterHandle,
1458  std::vector<SimCluster> const& simClusters,
1459  std::vector<size_t> const& sCIndices,
1460  const std::vector<float>& mask,
1461  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1462  unsigned int layers,
1463  const hgcal::RecoToSimCollectionWithSimClusters& scsInLayerClusterMap,
1464  const hgcal::SimToRecoCollectionWithSimClusters& lcsInSimClusterMap) const {
1465  //Each event to be treated as two events: an event in +ve endcap,
1466  //plus another event in -ve endcap. In this spirit there will be
1467  //a layer variable (layerid) that maps the layers in :
1468  //-z: 0->49
1469  //+z: 50->99
1470 
1471  //Will add some general plots on the specific mask in the future.
1472 
1473  layerClusters_to_SimClusters(histograms,
1474  count,
1475  clusterHandle,
1476  clusters,
1477  simClusterHandle,
1478  simClusters,
1479  sCIndices,
1480  mask,
1481  hitMap,
1482  layers,
1483  scsInLayerClusterMap,
1484  lcsInSimClusterMap);
1485 }
1486 
1487 void HGVHistoProducerAlgo::fill_cluster_histos(const Histograms& histograms,
1488  int count,
1489  const reco::CaloCluster& cluster) const {
1490  const auto eta = getEta(cluster.eta());
1491  histograms.h_cluster_eta[count]->Fill(eta);
1492 }
1493 
1494 void HGVHistoProducerAlgo::layerClusters_to_CaloParticles(const Histograms& histograms,
1495  edm::Handle<reco::CaloClusterCollection> clusterHandle,
1496  const reco::CaloClusterCollection& clusters,
1497  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
1498  std::vector<CaloParticle> const& cP,
1499  std::vector<size_t> const& cPIndices,
1500  std::vector<size_t> const& cPSelectedIndices,
1501  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1502  unsigned int layers,
1503  const hgcal::RecoToSimCollection& cpsInLayerClusterMap,
1504  const hgcal::SimToRecoCollection& cPOnLayerMap) const {
1505  auto nLayerClusters = clusters.size();
1506 
1507  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
1508  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToLayerClusterId_Map;
1509 
1510  // The association has to be done in an all-vs-all fashion.
1511  // For this reason we use the full set of CaloParticles, with the only filter on bx
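  // detIdToCaloParticleId_Map, filled below, connects each rechit DetId with the list of
  // (CaloParticle id, summed simhit fraction) pairs contributing to that cell.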
1512  for (const auto& cpId : cPIndices) {
1513  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1514  for (const auto& it_sc : simClusterRefVector) {
1515  const SimCluster& simCluster = (*(it_sc));
1516  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1517  for (const auto& it_haf : hits_and_fractions) {
1518  DetId hitid = (it_haf.first);
1519  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1520  if (itcheck != hitMap.end()) {
1521  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
1522  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
1523  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1524  detIdToCaloParticleId_Map[hitid].emplace_back(
1525  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1526  } else {
1527  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
1528  detIdToCaloParticleId_Map[hitid].end(),
1529  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1530  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
1531  findHitIt->fraction += it_haf.second;
1532  } else {
1533  detIdToCaloParticleId_Map[hitid].emplace_back(
1534  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1535  }
1536  }
1537  }
1538  }
1539  }
1540  }
1541 
1542  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1543  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1544  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1545 
1546  // This vector will store, for each hit in the Layercluster, the index of
1547  // the CaloParticle that contributed the most, in terms of energy, to it.
1548  // Special values are:
1549  //
1550  // -2 --> the reconstruction fraction of the RecHit is 0 (used in the past to monitor Halo Hits)
1551  // -3 --> same as before with the added condition that no CaloParticle has been linked to this RecHit
1552  // -1 --> the reco fraction is >0, but no CaloParticle has been linked to it
1553  // >=0 --> index of the linked CaloParticle
1554  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
1555  const auto firstHitDetId = hits_and_fractions[0].first;
1556  int lcLayerId =
1557  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1558 
1559  // This will store, for each CaloParticle, the energy it shares with the LayerCluster (e_shared), used below to find the dominant contributor
1560  std::unordered_map<unsigned, float> CPEnergyInLC;
1561 
1562  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
1563  DetId rh_detid = hits_and_fractions[hitId].first;
1564  const auto rhFraction = hits_and_fractions[hitId].second;
1565 
1566  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1567  const HGCRecHit* hit = itcheck->second;
1568 
1569  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(rh_detid);
1570  if (hit_find_in_LC == detIdToLayerClusterId_Map.end()) {
1571  detIdToLayerClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1572  }
1573  detIdToLayerClusterId_Map[rh_detid].emplace_back(HGVHistoProducerAlgo::detIdInfoInCluster{lcId, rhFraction});
1574 
1575  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1576 
1577  // if the fraction is zero or the hit does not belong to any calo
1578  // particle, set the caloparticleId for the hit to -1; this will
1579  // contribute to the number of noise hits
1580 
1581  // MR Remove the case in which the fraction is 0, since this could be a
1582  // real hit that has been marked as halo.
1583  if (rhFraction == 0.) {
1584  hitsToCaloParticleId[hitId] = -2;
1585  }
1586  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
1587  hitsToCaloParticleId[hitId] -= 1;
1588  } else {
1589  auto maxCPEnergyInLC = 0.f;
1590  auto maxCPId = -1;
1591  for (auto& h : hit_find_in_CP->second) {
1592  CPEnergyInLC[h.clusterId] += h.fraction * hit->energy();
1593  // Keep track of which CaloParticle contributed the most, in terms
1594  // of energy, to this specific LayerCluster.
1595  if (CPEnergyInLC[h.clusterId] > maxCPEnergyInLC) {
1596  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
1597  maxCPId = h.clusterId;
1598  }
1599  }
1600  hitsToCaloParticleId[hitId] = maxCPId;
1601  }
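      // Collapse all matched hits (caloparticle id >= 0) into the 0 bin; the
      // negative categories (-1, -2, -3) described above keep their own bins.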
1602  histograms.h_cellAssociation_perlayer.at(lcLayerId)->Fill(
1603  hitsToCaloParticleId[hitId] > 0. ? 0. : hitsToCaloParticleId[hitId]);
1604  } // End loop over hits on a LayerCluster
1605 
1606  } // End of loop over LayerClusters
1607 
1608  // Here we do fill the plots to compute the different metrics linked to
1609  // reco-level, namely fake-rate and merge-rate. In this loop we should *not*
1610  // restrict only to the selected CaloParticles.
1611  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1612  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1613  const auto firstHitDetId = hits_and_fractions[0].first;
1614  const int lcLayerId =
1615  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1616  histograms.h_denom_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1617  histograms.h_denom_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1618  //
1619  const edm::Ref<reco::CaloClusterCollection> lcRef(clusterHandle, lcId);
1620  const auto& cpsIt = cpsInLayerClusterMap.find(lcRef);
1621  if (cpsIt == cpsInLayerClusterMap.end())
1622  continue;
1623 
1624  const auto& cps = cpsIt->val;
1625  if (clusters[lcId].energy() == 0. && !cps.empty()) {
1626  for (const auto& cpPair : cps) {
1627  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1628  }
1629  continue;
1630  }
1631  for (const auto& cpPair : cps) {
1632  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first.index()
1633  << "\t score \t" << cpPair.second << std::endl;
1634  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
1635  auto const& cp_linked =
1636  std::find_if(std::begin(cPOnLayerMap[cpPair.first]),
1637  std::end(cPOnLayerMap[cpPair.first]),
1638  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1639  return p.first == lcRef;
1640  });
1641  if (cp_linked ==
1642  cPOnLayerMap[cpPair.first].end()) // This should never happen by construction of the association maps
1643  continue;
1644  histograms.h_sharedenergy_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1645  cp_linked->second.first / clusters[lcId].energy(), clusters[lcId].energy());
1646  histograms.h_energy_vs_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
1647  cpPair.second, cp_linked->second.first / clusters[lcId].energy());
1648  }
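  // Count the linked CaloParticles passing the score cut: at least one such
  // association means this LayerCluster is not a fake (it enters the numerator);
  // more than one means it merges contributions from several CaloParticles.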
1649  const auto assoc =
1650  std::count_if(std::begin(cps), std::end(cps), [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
1651  if (assoc) {
1652  histograms.h_num_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1653  histograms.h_num_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1654  if (assoc > 1) {
1655  histograms.h_numMerge_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
1656  histograms.h_numMerge_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
1657  }
1658  const auto& best = std::min_element(
1659  std::begin(cps), std::end(cps), [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1660  const auto& best_cp_linked =
1661  std::find_if(std::begin(cPOnLayerMap[best->first]),
1662  std::end(cPOnLayerMap[best->first]),
1663  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1664  return p.first == lcRef;
1665  });
1666  if (best_cp_linked ==
1667  cPOnLayerMap[best->first].end()) // This should never happen by construction of the association maps
1668  continue;
1669  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer.at(lcLayerId)->Fill(
1670  clusters[lcId].eta(), best_cp_linked->second.first / clusters[lcId].energy());
1671  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer.at(lcLayerId)->Fill(
1672  clusters[lcId].phi(), best_cp_linked->second.first / clusters[lcId].energy());
1673  }
1674  } // End of loop over LayerClusters
1675 
1676  // Here we do fill the plots to compute the different metrics linked to
1677  // gen-level, namely efficiency and duplicate rates. In this loop we should restrict
1678  // only to the selected CaloParticles.
1679  for (const auto& cpId : cPSelectedIndices) {
1680  const edm::Ref<CaloParticleCollection> cpRef(caloParticleHandle, cpId);
1681  const auto& lcsIt = cPOnLayerMap.find(cpRef);
1682 
1683  std::map<unsigned int, float> cPEnergyOnLayer;
1684  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId)
1685  cPEnergyOnLayer[layerId] = 0;
1686 
1687  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1688  for (const auto& it_sc : simClusterRefVector) {
1689  const SimCluster& simCluster = (*(it_sc));
1690  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1691  for (const auto& it_haf : hits_and_fractions) {
1692  const DetId hitid = (it_haf.first);
1693  const int cpLayerId =
1694  recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1695  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1696  if (itcheck != hitMap.end()) {
1697  const HGCRecHit* hit = itcheck->second;
1698  cPEnergyOnLayer[cpLayerId] += it_haf.second * hit->energy();
1699  }
1700  }
1701  }
1702 
1703  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1704  if (!cPEnergyOnLayer[layerId])
1705  continue;
1706 
1707  histograms.h_denom_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1708  histograms.h_denom_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1709 
1710  if (lcsIt == cPOnLayerMap.end())
1711  continue;
1712  const auto& lcs = lcsIt->val;
1713 
1714  auto getLCLayerId = [&](const unsigned int lcId) {
1715  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1716  const auto firstHitDetId = hits_and_fractions[0].first;
1717  const unsigned int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1718  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1719  return lcLayerId;
1720  };
1721 
1722  for (const auto& lcPair : lcs) {
1723  if (getLCLayerId(lcPair.first.index()) != layerId)
1724  continue;
1725  histograms.h_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second);
1726  histograms.h_sharedenergy_caloparticle2layercl_perlayer.at(layerId)->Fill(
1727  lcPair.second.first / cPEnergyOnLayer[layerId], cPEnergyOnLayer[layerId]);
1728  histograms.h_energy_vs_score_caloparticle2layercl_perlayer.at(layerId)->Fill(
1729  lcPair.second.second, lcPair.second.first / cPEnergyOnLayer[layerId]);
1730  }
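  // Count the LayerClusters on this layer passing the score cut: at least one
  // such association means the CaloParticle is reconstructed on this layer
  // (efficiency numerator); more than one flags it as duplicated.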
1731  const auto assoc = std::count_if(std::begin(lcs), std::end(lcs), [&](const auto& obj) {
1732  if (getLCLayerId(obj.first.index()) != layerId)
1733  return false;
1734  else
1735  return obj.second.second < ScoreCutCPtoLC_;
1736  });
1737  if (assoc) {
1738  histograms.h_num_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1739  histograms.h_num_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1740  if (assoc > 1) {
1741  histograms.h_numDup_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1742  histograms.h_numDup_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1743  }
1744  const auto best = std::min_element(std::begin(lcs), std::end(lcs), [&](const auto& obj1, const auto& obj2) {
1745  if (getLCLayerId(obj1.first.index()) != layerId)
1746  return false;
1747  else if (getLCLayerId(obj2.first.index()) == layerId)
1748  return obj1.second.second < obj2.second.second;
1749  else
1750  return true;
1751  });
1752  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer.at(layerId)->Fill(
1753  cP[cpId].g4Tracks()[0].momentum().eta(), best->second.first / cPEnergyOnLayer[layerId]);
1754  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer.at(layerId)->Fill(
1755  cP[cpId].g4Tracks()[0].momentum().phi(), best->second.first / cPEnergyOnLayer[layerId]);
1756  }
1757  }
1758  }
1759 }
1760 
1761 void HGVHistoProducerAlgo::layerClusters_to_SimClusters(
1762  const Histograms& histograms,
1763  int count,
1764  edm::Handle<reco::CaloClusterCollection> clusterHandle,
1765  const reco::CaloClusterCollection& clusters,
1766  edm::Handle<std::vector<SimCluster>> simClusterHandle,
1767  std::vector<SimCluster> const& sC,
1768  std::vector<size_t> const& sCIndices,
1769  const std::vector<float>& mask,
1770  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1771  unsigned int layers,
1772  const hgcal::RecoToSimCollectionWithSimClusters& scsInLayerClusterMap,
1773  const hgcal::SimToRecoCollectionWithSimClusters& lcsInSimClusterMap) const {
1774  auto nLayerClusters = clusters.size();
1775 
1776  // Here we do fill the plots to compute the different metrics linked to
1777  // reco-level, namely fake-rate and merge-rate. In this loop we should *not*
1778  // restrict only to the selected SimClusters.
1779  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
1780  if (mask[lcId] != 0.) {
1781  LogDebug("HGCalValidator") << "Skipping layer cluster " << lcId << " not belonging to mask" << std::endl;
1782  continue;
1783  }
1784  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1785  const auto firstHitDetId = hits_and_fractions[0].first;
1786  const int lcLayerId =
1787  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1788  //Although the ones below are already created in the LC to CP association, we will
1789  //recreate them here since the post processor looks in a specific directory.
1790  histograms.h_denom_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1791  histograms.h_denom_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1792  //
1793  const edm::Ref<reco::CaloClusterCollection> lcRef(clusterHandle, lcId);
1794  const auto& scsIt = scsInLayerClusterMap.find(lcRef);
1795  if (scsIt == scsInLayerClusterMap.end())
1796  continue;
1797 
1798  const auto& scs = scsIt->val;
1799  // If a reconstructed LayerCluster has energy 0 but is linked to at least one
1800  // SimCluster, then its score should be 1, as set in the associator
1801  if (clusters[lcId].energy() == 0. && !scs.empty()) {
1802  for (const auto& scPair : scs) {
1803  histograms.h_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(scPair.second);
1804  }
1805  continue;
1806  }
1807  //Loop through all SimClusters linked to the layer cluster under study
1808  for (const auto& scPair : scs) {
1809  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t SC id: \t" << scPair.first.index()
1810  << "\t score \t" << scPair.second << std::endl;
1811  //This should be filled (#layerClusters in the layer) x (#linked SimClusters) times
1812  histograms.h_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(scPair.second);
1813  auto const& sc_linked =
1814  std::find_if(std::begin(lcsInSimClusterMap[scPair.first]),
1815  std::end(lcsInSimClusterMap[scPair.first]),
1816  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1817  return p.first == lcRef;
1818  });
1819  if (sc_linked ==
1820  lcsInSimClusterMap[scPair.first].end()) // This should never happen by construction of the association maps
1821  continue;
1822  histograms.h_sharedenergy_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(
1823  sc_linked->second.first / clusters[lcId].energy(), clusters[lcId].energy());
1824  histograms.h_energy_vs_score_layercl2simcluster_perlayer[count].at(lcLayerId)->Fill(
1825  scPair.second, sc_linked->second.first / clusters[lcId].energy());
1826  }
1827  //Here we count how many of the linked SimClusters of the layer cluster under study have a score below the cut.
1828  const auto assoc =
1829  std::count_if(std::begin(scs), std::end(scs), [](const auto& obj) { return obj.second < ScoreCutLCtoSC_; });
1830  if (assoc) {
1831  histograms.h_num_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1832  histograms.h_num_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1833  if (assoc > 1) {
1834  histograms.h_numMerge_layercl_in_simcl_eta_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].eta());
1835  histograms.h_numMerge_layercl_in_simcl_phi_perlayer[count].at(lcLayerId)->Fill(clusters[lcId].phi());
1836  }
1837  const auto& best = std::min_element(
1838  std::begin(scs), std::end(scs), [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1839  //From all linked SimClusters find the one with the best (lowest) score and take its scId
1840  const auto& best_sc_linked =
1841  std::find_if(std::begin(lcsInSimClusterMap[best->first]),
1842  std::end(lcsInSimClusterMap[best->first]),
1843  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
1844  return p.first == lcRef;
1845  });
1846  if (best_sc_linked ==
1847  lcsInSimClusterMap[best->first].end()) // This should never happen by construction of the association maps
1848  continue;
1849  histograms.h_sharedenergy_layercl2simcluster_vs_eta_perlayer[count].at(lcLayerId)->Fill(
1850  clusters[lcId].eta(), best_sc_linked->second.first / clusters[lcId].energy());
1851  histograms.h_sharedenergy_layercl2simcluster_vs_phi_perlayer[count].at(lcLayerId)->Fill(
1852  clusters[lcId].phi(), best_sc_linked->second.first / clusters[lcId].energy());
1853  }
1854  } // End of loop over LayerClusters
1855 
1856  // Here we do fill the plots to compute the different metrics linked to
1857  // gen-level, namely efficiency and duplicate. In this loop we should restrict
1858  // only to the selected SimClusters.
1859  for (const auto& scId : sCIndices) {
1860  const edm::Ref<SimClusterCollection> scRef(simClusterHandle, scId);
1861  const auto& lcsIt = lcsInSimClusterMap.find(scRef);
1862 
1863  std::map<unsigned int, float> sCEnergyOnLayer;
1864  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId)
1865  sCEnergyOnLayer[layerId] = 0;
1866 
1867  const auto& hits_and_fractions = sC[scId].hits_and_fractions();
1868  for (const auto& it_haf : hits_and_fractions) {
1869  const DetId hitid = (it_haf.first);
1870  const int scLayerId =
1871  recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1872  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1873  if (itcheck != hitMap.end()) {
1874  const HGCRecHit* hit = itcheck->second;
1875  sCEnergyOnLayer[scLayerId] += it_haf.second * hit->energy();
1876  }
1877  }
1878 
1879  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1880  if (!sCEnergyOnLayer[layerId])
1881  continue;
1882 
1883  histograms.h_denom_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1884  histograms.h_denom_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1885 
1886  if (lcsIt == lcsInSimClusterMap.end())
1887  continue;
1888  const auto& lcs = lcsIt->val;
1889 
1890  auto getLCLayerId = [&](const unsigned int lcId) {
1891  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1892  const auto firstHitDetId = hits_and_fractions[0].first;
1893  const unsigned int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1894  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1895  return lcLayerId;
1896  };
1897 
1898  //Loop through layer clusters linked to the SimCluster under study
1899  for (const auto& lcPair : lcs) {
1900  auto lcId = lcPair.first.index();
1901  if (mask[lcId] != 0.) {
1902  LogDebug("HGCalValidator") << "Skipping layer cluster " << lcId << " not belonging to mask" << std::endl;
1903  continue;
1904  }
1905 
1906  if (getLCLayerId(lcId) != layerId)
1907  continue;
1908  histograms.h_score_simcluster2layercl_perlayer[count].at(layerId)->Fill(lcPair.second.second);
1909  histograms.h_sharedenergy_simcluster2layercl_perlayer[count].at(layerId)->Fill(
1910  lcPair.second.first / sCEnergyOnLayer[layerId], sCEnergyOnLayer[layerId]);
1911  histograms.h_energy_vs_score_simcluster2layercl_perlayer[count].at(layerId)->Fill(
1912  lcPair.second.second, lcPair.second.first / sCEnergyOnLayer[layerId]);
1913  }
1914  const auto assoc = std::count_if(std::begin(lcs), std::end(lcs), [&](const auto& obj) {
1915  if (getLCLayerId(obj.first.index()) != layerId)
1916  return false;
1917  else
1918  return obj.second.second < ScoreCutSCtoLC_;
1919  });
1920  if (assoc) {
1921  histograms.h_num_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1922  histograms.h_num_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1923  if (assoc > 1) {
1924  histograms.h_numDup_simcluster_eta_perlayer[count].at(layerId)->Fill(sC[scId].eta());
1925  histograms.h_numDup_simcluster_phi_perlayer[count].at(layerId)->Fill(sC[scId].phi());
1926  }
1927  const auto best = std::min_element(std::begin(lcs), std::end(lcs), [&](const auto& obj1, const auto& obj2) {
1928  if (getLCLayerId(obj1.first.index()) != layerId)
1929  return false;
1930  else if (getLCLayerId(obj2.first.index()) == layerId)
1931  return obj1.second.second < obj2.second.second;
1932  else
1933  return true;
1934  });
1935  histograms.h_sharedenergy_simcluster2layercl_vs_eta_perlayer[count].at(layerId)->Fill(
1936  sC[scId].eta(), best->second.first / sCEnergyOnLayer[layerId]);
1937  histograms.h_sharedenergy_simcluster2layercl_vs_phi_perlayer[count].at(layerId)->Fill(
1938  sC[scId].phi(), best->second.first / sCEnergyOnLayer[layerId]);
1939  }
1940  }
1941  }
1942 }
1943 
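// Note: the SimCluster association above mirrors the CaloParticle association in
// layerClusters_to_CaloParticles, with ScoreCutLCtoSC_ and ScoreCutSCtoLC_ playing
// the roles of ScoreCutLCtoCP_ and ScoreCutCPtoLC_.
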
1944 void HGVHistoProducerAlgo::fill_generic_cluster_histos(const Histograms& histograms,
1945  int count,
1946  edm::Handle<reco::CaloClusterCollection> clusterHandle,
1947  const reco::CaloClusterCollection& clusters,
1948  const Density& densities,
1949  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
1950  std::vector<CaloParticle> const& cP,
1951  std::vector<size_t> const& cPIndices,
1952  std::vector<size_t> const& cPSelectedIndices,
1953  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1954  std::map<double, double> cummatbudg,
1955  unsigned int layers,
1956  std::vector<int> thicknesses,
1957  const hgcal::RecoToSimCollection& cpsInLayerClusterMap,
1958  const hgcal::SimToRecoCollection& cPOnLayerMap) const {
1959  //Each event to be treated as two events: an event in +ve endcap,
1960  //plus another event in -ve endcap. In this spirit there will be
1961  //a layer variable (layerid) that maps the layers in :
1962  //-z: 0->51
1963  //+z: 52->103
1964 
1965  //To keep track of total num of layer clusters per layer
1966  //tnlcpl[layerid]
1967  std::vector<int> tnlcpl(1000, 0); //tnlcpl.clear(); tnlcpl.reserve(1000);
1968 
1969  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1970  std::map<std::string, int> tnlcpthplus;
1971  tnlcpthplus.clear();
1972  std::map<std::string, int> tnlcpthminus;
1973  tnlcpthminus.clear();
1974  //At the beginning of the event all layers should be initialized to zero total clusters per thickness
1975  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1976  tnlcpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1977  tnlcpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1978  }
1979  //To keep track of the total num of clusters with mixed thickness hits per event
1980  tnlcpthplus.insert(std::pair<std::string, int>("mixed", 0));
1981  tnlcpthminus.insert(std::pair<std::string, int>("mixed", 0));
1982 
1983  layerClusters_to_CaloParticles(histograms,
1984  clusterHandle,
1985  clusters,
1986  caloParticleHandle,
1987  cP,
1988  cPIndices,
1989  cPSelectedIndices,
1990  hitMap,
1991  layers,
1992  cpsInLayerClusterMap,
1993  cPOnLayerMap);
1994 
1995  //To find out the total amount of energy clustered per layer
1996  //Initialize with zeros because clear() was seen to give weird numbers.
1997  std::vector<double> tecpl(1000, 0.0); //tecpl.clear(); tecpl.reserve(1000);
1998  //for the longitudinal depth barycenter
1999  std::vector<double> ldbar(1000, 0.0); //ldbar.clear(); ldbar.reserve(1000);
2000 
2001  //We need to compare with the total amount of energy coming from CaloParticles
2002  double caloparteneplus = 0.;
2003  double caloparteneminus = 0.;
2004  for (const auto& cpId : cPIndices) {
2005  if (cP[cpId].eta() >= 0.) {
2006  caloparteneplus = caloparteneplus + cP[cpId].energy();
2007  }
2008  if (cP[cpId].eta() < 0.) {
2009  caloparteneminus = caloparteneminus + cP[cpId].energy();
2010  }
2011  }
2012 
2013  //loop through clusters of the event
2014  for (unsigned int lcId = 0; lcId < clusters.size(); lcId++) {
2015  const std::vector<std::pair<DetId, float>> hits_and_fractions = clusters[lcId].hitsAndFractions();
2016 
2017  const DetId seedid = clusters[lcId].seed();
2018  const double seedx = recHitTools_->getPosition(seedid).x();
2019  const double seedy = recHitTools_->getPosition(seedid).y();
2020  DetId maxid = findmaxhit(clusters[lcId], hitMap);
2021 
2022  // const DetId maxid = clusters[lcId].max();
2023  double maxx = recHitTools_->getPosition(maxid).x();
2024  double maxy = recHitTools_->getPosition(maxid).y();
2025 
2026  //Auxiliary variables to count the number of different kinds of hits in each cluster
2027  int nthhits120p = 0;
2028  int nthhits200p = 0;
2029  int nthhits300p = 0;
2030  int nthhitsscintp = 0;
2031  int nthhits120m = 0;
2032  int nthhits200m = 0;
2033  int nthhits300m = 0;
2034  int nthhitsscintm = 0;
2035  //For the thickness of the hits of the layer cluster.
2036  double thickness = 0.;
2037  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
2038  int layerid = 0;
2039  //We will need another layer variable for the longitudinal material budget file reading.
2040  //In this case we need no distinction between -z and +z.
2041  int lay = 0;
2042  //We will need here to save the combination thick_lay
2043  std::string istr = "";
2044  //boolean to check for the layer that the cluster belongs to. Maybe later we will check all the layer hits.
2045  bool cluslay = true;
2046  //zside that the current cluster belongs to.
2047  int zside = 0;
2048 
2049  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
2050  it_haf != hits_and_fractions.end();
2051  ++it_haf) {
2052  const DetId rh_detid = it_haf->first;
2053  //The layer that the current hit belongs to
2054  layerid = recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
2055  lay = recHitTools_->getLayerWithOffset(rh_detid);
2056  zside = recHitTools_->zside(rh_detid);
2057  if (rh_detid.det() == DetId::Forward || rh_detid.det() == DetId::HGCalEE || rh_detid.det() == DetId::HGCalHSi) {
2058  thickness = recHitTools_->getSiThickness(rh_detid);
2059  } else if (rh_detid.det() == DetId::HGCalHSc) {
2060  thickness = -1;
2061  } else {
2062  LogDebug("HGCalValidator") << "These are HGCal layer clusters, you shouldn't be here !!! " << layerid << "\n";
2063  continue;
2064  }
2065 
2066  //Count here only once the layer cluster and save the combination thick_layerid
2067  std::string curistr = std::to_string((int)thickness);
2068  std::string lay_string = std::to_string(layerid);
2069  while (lay_string.size() < 2)
2070  lay_string.insert(0, "0");
2071  curistr += "_" + lay_string;
2072  if (cluslay) {
2073  tnlcpl[layerid]++;
2074  istr = curistr;
2075  cluslay = false;
2076  }
2077 
2078  if ((thickness == 120.) && (recHitTools_->zside(rh_detid) > 0.)) {
2079  nthhits120p++;
2080  } else if ((thickness == 120.) && (recHitTools_->zside(rh_detid) < 0.)) {
2081  nthhits120m++;
2082  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) > 0.)) {
2083  nthhits200p++;
2084  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) < 0.)) {
2085  nthhits200m++;
2086  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) > 0.)) {
2087  nthhits300p++;
2088  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) < 0.)) {
2089  nthhits300m++;
2090  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) > 0.)) {
2091  nthhitsscintp++;
2092  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) < 0.)) {
2093  nthhitsscintm++;
2094  } else { //assert(0);
2095  LogDebug("HGCalValidator")
2096  << " You are running a geometry that contains thicknesses different than the normal ones. "
2097  << "\n";
2098  }
2099 
2100  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2101  if (itcheck == hitMap.end()) {
2102  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a hit " << rh_detid.rawId() << " "
2103  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
2104  continue;
2105  }
2106 
2107  const HGCRecHit* hit = itcheck->second;
2108 
2109  //Here for the per cell plots
2110  //----
2111  const double hit_x = recHitTools_->getPosition(rh_detid).x();
2112  const double hit_y = recHitTools_->getPosition(rh_detid).y();
2113  double distancetoseed = distance(seedx, seedy, hit_x, hit_y);
2114  double distancetomax = distance(maxx, maxy, hit_x, hit_y);
2115  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer.count(curistr)) {
2116  histograms.h_distancetoseedcell_perthickperlayer.at(curistr)->Fill(distancetoseed);
2117  }
2118  //----
2119  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer_eneweighted.count(curistr)) {
2120  histograms.h_distancetoseedcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetoseed, hit->energy());
2121  }
2122  //----
2123  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer.count(curistr)) {
2124  histograms.h_distancetomaxcell_perthickperlayer.at(curistr)->Fill(distancetomax);
2125  }
2126  //----
2127  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer_eneweighted.count(curistr)) {
2128  histograms.h_distancetomaxcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetomax, hit->energy());
2129  }
2130 
2131  //Let's check the density
2132  std::map<DetId, float>::const_iterator dit = densities.find(rh_detid);
2133  if (dit == densities.end()) {
2134  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a density " << rh_detid.rawId() << " "
2135  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
2136  continue;
2137  }
2138 
2139  if (histograms.h_cellsenedens_perthick.count((int)thickness)) {
2140  histograms.h_cellsenedens_perthick.at((int)thickness)->Fill(dit->second);
2141  }
2142 
2143  } // end of loop through hits and fractions
2144 
2145  //Check for simultaneously having hits of different kind. Checking at least two combinations is sufficient.
2146  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
2147  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
2148  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
2149  tnlcpthplus["mixed"]++;
2150  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
2151  //This is a cluster with hits of one kind
2152  tnlcpthplus[std::to_string((int)thickness)]++;
2153  }
2154  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
2155  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
2156  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
2157  tnlcpthminus["mixed"]++;
2158  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
2159  //This is a cluster with hits of one kind
2160  tnlcpthminus[std::to_string((int)thickness)]++;
2161  }
2162 
2163  //To find the thickness with the biggest amount of cells
2164  std::vector<int> bigamoth;
2165  bigamoth.clear();
2166  if (zside > 0) {
2167  bigamoth.push_back(nthhits120p);
2168  bigamoth.push_back(nthhits200p);
2169  bigamoth.push_back(nthhits300p);
2170  bigamoth.push_back(nthhitsscintp);
2171  }
2172  if (zside < 0) {
2173  bigamoth.push_back(nthhits120m);
2174  bigamoth.push_back(nthhits200m);
2175  bigamoth.push_back(nthhits300m);
2176  bigamoth.push_back(nthhitsscintm);
2177  }
2178  auto bgth = std::max_element(bigamoth.begin(), bigamoth.end());
2179  istr = std::to_string(thicknesses[std::distance(bigamoth.begin(), bgth)]);
2180  std::string lay_string = std::to_string(layerid);
2181  while (lay_string.size() < 2)
2182  lay_string.insert(0, "0");
2183  istr += "_" + lay_string;
2184 
2185  //Here for the per cluster plots that need the thickness_layer info
2186  if (histograms.h_cellsnum_perthickperlayer.count(istr)) {
2187  histograms.h_cellsnum_perthickperlayer.at(istr)->Fill(hits_and_fractions.size());
2188  }
2189 
2190  //Now, with the distance between seed and max cell.
2191  double distancebetseedandmax = distance(seedx, seedy, maxx, maxy);
2192  //The thickness_layer combination in this case will use the thickness of the seed as a convention.
2193  std::string seedstr = std::to_string((int)recHitTools_->getSiThickness(seedid));
2194  seedstr += "_" + lay_string;
2195  if (histograms.h_distancebetseedandmaxcell_perthickperlayer.count(seedstr)) {
2196  histograms.h_distancebetseedandmaxcell_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax);
2197  }
2198  if (histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.count(seedstr)) {
2199  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax,
2200  clusters[lcId].energy());
2201  }
2202 
2203  //Energy clustered per layer
2204  tecpl[layerid] = tecpl[layerid] + clusters[lcId].energy();
2205  ldbar[layerid] = ldbar[layerid] + clusters[lcId].energy() * cummatbudg[(double)lay];
2206 
2207  } //end of loop through clusters of the event
2208 
2209  //After the end of the event we can now fill with the results.
2210  //First a couple of variables to keep the sum of the energy of all clusters
2211  double sumeneallcluspl = 0.;
2212  double sumeneallclusmi = 0.;
2213  //And the longitudinal variable
2214  double sumldbarpl = 0.;
2215  double sumldbarmi = 0.;
2216  //Per layer : Loop 0->103
2217  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2218  if (histograms.h_clusternum_perlayer.count(ilayer)) {
2219  histograms.h_clusternum_perlayer.at(ilayer)->Fill(tnlcpl[ilayer]);
2220  }
2221  // Two times one for plus and one for minus
2222  //First with the -z endcap
2223  if (ilayer < layers) {
2224  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
2225  if (caloparteneminus != 0.) {
2226  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneminus);
2227  }
2228  }
2229  //Keep here the total energy for the event in -z
2230  sumeneallclusmi = sumeneallclusmi + tecpl[ilayer];
2231  //And for the longitudinal variable
2232  sumldbarmi = sumldbarmi + ldbar[ilayer];
2233  } else { //Then for the +z
2234  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
2235  if (caloparteneplus != 0.) {
2236  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneplus);
2237  }
2238  }
2239  //Keep here the total energy for the event in +z
2240  sumeneallcluspl = sumeneallcluspl + tecpl[ilayer];
2241  //And for the longitudinal variable
2242  sumldbarpl = sumldbarpl + ldbar[ilayer];
2243  } //end of +z loop
2244 
2245  } //end of loop over layers
2246 
2247  //Per thickness
2248  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
2249  if (histograms.h_clusternum_perthick.count(*it)) {
2250  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthplus[std::to_string(*it)]);
2251  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthminus[std::to_string(*it)]);
2252  }
2253  }
2254  //Mixed thickness clusters
2255  histograms.h_mixedhitscluster_zplus[count]->Fill(tnlcpthplus["mixed"]);
2256  histograms.h_mixedhitscluster_zminus[count]->Fill(tnlcpthminus["mixed"]);
2257 
2258  //Total energy clustered from all layer clusters (fraction)
2259  if (caloparteneplus != 0.) {
2260  histograms.h_energyclustered_zplus[count]->Fill(100. * sumeneallcluspl / caloparteneplus);
2261  }
2262  if (caloparteneminus != 0.) {
2263  histograms.h_energyclustered_zminus[count]->Fill(100. * sumeneallclusmi / caloparteneminus);
2264  }
2265 
2266  //For the longitudinal depth barycenter
2267  histograms.h_longdepthbarycentre_zplus[count]->Fill(sumldbarpl / sumeneallcluspl);
2268  histograms.h_longdepthbarycentre_zminus[count]->Fill(sumldbarmi / sumeneallclusmi);
2269 }
2270 
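// A minimal sketch of the longitudinal depth barycentre filled above: the
// energy-weighted mean of the cumulative material budget over the layers of
// one endcap (hypothetical helper, not used elsewhere in this file).
inline double longDepthBarycentre(const std::vector<double>& energyPerLayer,
                                  const std::vector<double>& cumMatBudgetPerLayer) {
  double sumE = 0., sumWeighted = 0.;
  for (std::size_t l = 0; l < energyPerLayer.size() && l < cumMatBudgetPerLayer.size(); ++l) {
    sumE += energyPerLayer[l];
    sumWeighted += energyPerLayer[l] * cumMatBudgetPerLayer[l];
  }
  return sumE > 0. ? sumWeighted / sumE : 0.;
}
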
2271 void HGVHistoProducerAlgo::tracksters_to_CaloParticles(const Histograms& histograms,
2272  int count,
2273  const ticl::TracksterCollection& tracksters,
2274  const reco::CaloClusterCollection& layerClusters,
2275  std::vector<CaloParticle> const& cP,
2276  std::vector<size_t> const& cPIndices,
2277  std::vector<size_t> const& cPSelectedIndices,
2278  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
2279  unsigned int layers) const {
2280  auto nTracksters = tracksters.size();
2281  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
2282  auto nCaloParticles = cPIndices.size();
2283 
2284  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
2285  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInTrackster>> detIdToTracksterId_Map;
2286  std::vector<int> tracksters_fakemerge(nTracksters, 0);
2287  std::vector<int> tracksters_duplicate(nTracksters, 0);
2288 
2289  // this contains the ids of the CaloParticles contributing with at least one hit to the Trackster and the reconstruction error
2290  //cpsInTrackster[trackster][CPids]
2291  //Connects a Trackster with all related CaloParticles.
2292  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInTrackster;
2293  cpsInTrackster.resize(nTracksters);
2294 
2295  //cPOnLayer[caloparticle][layer]
2296  //This defines a "calo particle on layer" concept. It is only filled in case
2297  //that calo particle has a reconstructed hit related via detid. So, a cPOnLayer[i][j] connects a
2298  //specific calo particle i in layer j with:
2299  //1. the sum of all rechits energy times fraction of the relevant simhit in layer j related to that calo particle i.
2300  //2. the hits and fractions of that calo particle i in layer j.
2301  //3. the layer clusters with matched rechit id.
2302  std::unordered_map<int, std::vector<caloParticleOnLayer>> cPOnLayer;
2303  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2304  auto cpIndex = cPIndices[i];
2305  cPOnLayer[cpIndex].resize(layers * 2);
2306  for (unsigned int j = 0; j < layers * 2; ++j) {
2307  cPOnLayer[cpIndex][j].caloParticleId = cpIndex;
2308  cPOnLayer[cpIndex][j].energy = 0.f;
2309  cPOnLayer[cpIndex][j].hits_and_fractions.clear();
2310  }
2311  }
2312 
2313  for (const auto& cpId : cPIndices) {
2314  //take sim clusters
2315  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
2316  //loop through sim clusters
2317  for (const auto& it_sc : simClusterRefVector) {
2318  const SimCluster& simCluster = (*(it_sc));
2319  const auto& hits_and_fractions = simCluster.hits_and_fractions();
2320  for (const auto& it_haf : hits_and_fractions) {
2321  DetId hitid = (it_haf.first);
2322  //V9:maps the layers in -z: 0->51 and in +z: 52->103
2323  //V10:maps the layers in -z: 0->49 and in +z: 50->99
2324  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
2325  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
2326  //Checks whether the current hit belonging to sim cluster has a reconstructed hit.
2327  if (itcheck != hitMap.end()) {
2328  const HGCRecHit* hit = itcheck->second;
2329  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
2330  //make a map that will connect a detid with:
2331  //1. the CaloParticles that have a SimCluster with sim hits in that cell via caloparticle id.
2332  //2. the sum of all simhits fractions that contributes to that detid.
2333  //So, keep in mind that in case of multiple CaloParticles contributing in the same cell
2334  //the fraction is the sum over all calo particles. So, something like:
2335  //detid: (caloparticle 1, sum of hits fractions in that detid over all cp) , (caloparticle 2, sum of hits fractions in that detid over all cp), (caloparticle 3, sum of hits fractions in that detid over all cp) ...
2336  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
2337  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
2338  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
2339  detIdToCaloParticleId_Map[hitid].emplace_back(
2340  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2341  } else {
2342  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
2343  detIdToCaloParticleId_Map[hitid].end(),
2344  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2345  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
2346  findHitIt->fraction += it_haf.second;
2347  } else {
2348  detIdToCaloParticleId_Map[hitid].emplace_back(
2349  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
2350  }
2351  }
2352  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
2353  //fill the cPOnLayer[caloparticle][layer] object with energy (sum of all rechits energy times fraction
2354  //of the relevant simhit) and keep the hit (detid and fraction) that contributed.
2355  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
2356  // We need to compress the hits and fractions in order to have a
2357  // reasonable score between CP and LC. Imagine, for example, that a
2358  // CP has detID X used by 2 SimClusters with different fractions. If
2359  // a single LC uses X with fraction 1 and is compared to the 2
2360  // contributions separately, it will be assigned a score != 0, which
2361  // is wrong.
2362  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
2363  auto found = std::find_if(
2364  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
2365  if (found != haf.end()) {
2366  found->second += it_haf.second;
2367  } else {
2368  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
2369  }
2370  }
2371  } // end of loop through simhits
2372  } // end of loop through SimClusters
2373  } // end of loop through CaloParticles
2374 
2375  auto apply_LCMultiplicity = [](const ticl::Trackster& trackster, const reco::CaloClusterCollection& layerClusters) {
2376  std::vector<std::pair<DetId, float>> hits_and_fractions_norm;
2377  int lcInTst = 0;
2378  std::for_each(std::begin(trackster.vertices()), std::end(trackster.vertices()), [&](unsigned int idx) {
2379  const auto fraction = 1.f / trackster.vertex_multiplicity(lcInTst++);
2380  for (const auto& cell : layerClusters[idx].hitsAndFractions()) {
2381  hits_and_fractions_norm.emplace_back(cell.first, cell.second * fraction);
2382  }
2383  });
2384  return hits_and_fractions_norm;
2385  };
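  // e.g. a layer cluster shared by two tracksters (vertex_multiplicity = 2)
  // enters this trackster's hit list with half of each of its hit fractions,
  // so shared energy is not double counted between tracksters.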
2386 
2387  //Loop through Tracksters
2388  for (unsigned int tstId = 0; tstId < nTracksters; ++tstId) {
2389  if (tracksters[tstId].vertices().empty())
2390  continue;
2391 
2392  std::unordered_map<unsigned, float> CPEnergyInTST;
2393  int maxCPId_byNumberOfHits = -1;
2394  unsigned int maxCPNumberOfHitsInTST = 0;
2395  int maxCPId_byEnergy = -1;
2396  float maxEnergySharedTSTandCP = 0.f;
2397  float energyFractionOfTSTinCP = 0.f;
2398  float energyFractionOfCPinTST = 0.f;
2399 
2400  //In case of matched rechit-simhit, i.e. matched
2401  //CaloParticle-LayerCluster-Trackster, count and save the number of
2402  //rechits related to the maximum-energy CaloParticle out of all
2403  //CaloParticles related to that layer cluster and Trackster.
2404 
2405  std::unordered_map<unsigned, unsigned> occurrencesCPinTST;
2406  unsigned int numberOfNoiseHitsInTST = 0;
2407  unsigned int numberOfHaloHitsInTST = 0;
2408 
2409  const auto tst_hitsAndFractions = apply_LCMultiplicity(tracksters[tstId], layerClusters);
2410  const auto numberOfHitsInTST = tst_hitsAndFractions.size();
2411 
2412  //hitsToCaloParticleId is a vector of ints, one for each rechit of the
2413  //layer cluster under study. If negative, there is no simhit from any CaloParticle related.
2414  //If positive, at least one CaloParticle has been found with matched simhit.
2415  //In more detail:
2416  // 1. hitsToCaloParticleId[hitId] = -3
2417  // TN: These represent Halo Cells(N) that have not been
2418  // assigned to any CaloParticle (hence the T).
2419  // 2. hitsToCaloParticleId[hitId] = -2
2420  // FN: These represent Halo Cells(N) that have been assigned
2421  // to a CaloParticle (hence the F, since those should not have been marked as halo)
2422  // 3. hitsToCaloParticleId[hitId] = -1
2423  // FP: These represent Real Cells(P) that have not been
2424  // assigned to any CaloParticle (hence the F, since these are fakes)
2425  // 4. hitsToCaloParticleId[hitId] >= 0
2426  // TP: These represent Real Cells(P) that have been assigned
2427  // to a CaloParticle (hence the T)
2428 
2429  std::vector<int> hitsToCaloParticleId(numberOfHitsInTST);
2430  //The lcLayerId variable computed below
2431  //maps the layers in -z: 0->51 and in +z: 52->103
2432 
2433  //Loop through the hits of the trackster under study
2434  for (unsigned int hitId = 0; hitId < numberOfHitsInTST; hitId++) {
2435  const auto rh_detid = tst_hitsAndFractions[hitId].first;
2436  const auto rhFraction = tst_hitsAndFractions[hitId].second;
2437 
2438  const int lcLayerId =
2439  recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
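  // Illustrative arithmetic (not in the original code), assuming 52 layers per endcap:
  //   zside == -1  ->  ((-1 + 1) >> 1) == 0  ->  lcLayerId = layerWithOffset - 1        (range 0..51)
  //   zside == +1  ->  ((+1 + 1) >> 1) == 1  ->  lcLayerId = layerWithOffset + 52 - 1   (range 52..103)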
2440  //Since the hit belongs to the layer cluster, it must also be in the rechit map.
2441  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2442  const HGCRecHit* hit = itcheck->second;
2443 
2444  //Make a map that will connect a detid (that belongs to a rechit of the Trackster under study,
2445  //no need to save others) with:
2446  //1. the Tracksters that have rechits in that detid
2447  //2. the fraction of the rechit of each Trackster that contributes to that detid.
2448  //So, something like:
2449  //detid: (Trackster 1, hit fraction) , (Trackster 2, hit fraction), (Trackster 3, hit fraction) ...
2450  //Note that, unlike the CaloParticle map above, entries here are simply appended without summing fractions.
2451  auto hit_find_in_LC = detIdToTracksterId_Map.find(rh_detid);
2452  if (hit_find_in_LC == detIdToTracksterId_Map.end()) {
2453  detIdToTracksterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInTrackster>();
2454  }
2455  detIdToTracksterId_Map[rh_detid].emplace_back(
2456  HGVHistoProducerAlgo::detIdInfoInTrackster{tstId, tstId, rhFraction});
2457 
2458  //Check whether the rechit of the layer cluster under study has a sim hit in the same cell.
2459  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
2460 
2461  // if the fraction is zero or the hit does not belong to any calo
2462  // particle, set the caloparticleId for the hit to -1; this will
2463  // contribute to the number of noise hits
2464 
2465  // MR Remove the case in which the fraction is 0, since this could be a
2466  // real hit that has been marked as halo.
2467  if (rhFraction == 0.) {
2468  hitsToCaloParticleId[hitId] = -2;
2469  numberOfHaloHitsInTST++;
2470  }
2471  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
2472  hitsToCaloParticleId[hitId] -= 1;
2473  } else {
2474  auto maxCPEnergyInTST = 0.f;
2475  auto maxCPId = -1;
2476  for (const auto& h : hit_find_in_CP->second) {
2477  auto shared_fraction = std::min(rhFraction, h.fraction);
2478  //We are in the case where there are CaloParticles with simhits connected via detid with the rechit under study
2479  //So, from all layer clusters, find the rechits that are connected with a CaloParticle and save/calculate the
2480  //energy of that CaloParticle as the sum over all rechits of the rechit energy weighted
2481  //by the CaloParticle's fraction related to that rechit.
2482  CPEnergyInTST[h.clusterId] += shared_fraction * hit->energy();
2483  //Here the cPOnLayer[caloparticle][layer] object described above is set.
2484  //For Tracksters with a matched rechit, the CP fraction times the hit energy is added and saved.
2485  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[tstId].first +=
2486  shared_fraction * hit->energy();
2487  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[tstId].second = FLT_MAX;
2488  //cpsInTrackster[trackster][CPids]
2489  //Connects a Trackster with all related CaloParticles.
2490  cpsInTrackster[tstId].emplace_back(h.clusterId, FLT_MAX);
2491  //From all CaloParticles related to a layer cluster, save the id and energy of the CaloParticle
2492  //that, after simhit-rechit matching in the layer, has the maximum energy.
2493  if (shared_fraction > maxCPEnergyInTST) {
2494  //energy is used only here. cpid is saved for Tracksters
2495  maxCPEnergyInTST = CPEnergyInTST[h.clusterId];
2496  maxCPId = h.clusterId;
2497  }
2498  }
2499  //Keep in mind that maxCPId could be zero here. So, below, require strictly negative values (not including zero) to count noise.
2500  hitsToCaloParticleId[hitId] = maxCPId;
2501  }
2502 
2503  } //end of loop through rechits of the layer cluster.
2504 
2505  //Loop through all rechits to count how many of them are noise and how many are matched.
2506  //In case of matched rechit-simhit, count and save the number of rechits related to the maximum-energy CaloParticle.
2507  for (auto c : hitsToCaloParticleId) {
2508  if (c < 0) {
2509  numberOfNoiseHitsInTST++;
2510  } else {
2511  occurrencesCPinTST[c]++;
2512  }
2513  }
2514 
2515  //Below, from all maximum-energy CaloParticles, save the one with the largest number
2516  //of related rechits.
2517  for (auto& c : occurrencesCPinTST) {
2518  if (c.second > maxCPNumberOfHitsInTST) {
2519  maxCPId_byNumberOfHits = c.first;
2520  maxCPNumberOfHitsInTST = c.second;
2521  }
2522  }
2523 
2524  //Find the CaloParticle that has the maximum energy shared with the Trackster under study.
2525  for (auto& c : CPEnergyInTST) {
2526  if (c.second > maxEnergySharedTSTandCP) {
2527  maxCPId_byEnergy = c.first;
2528  maxEnergySharedTSTandCP = c.second;
2529  }
2530  }
2531  //The energy of the CaloParticle that was found to have the maximum energy shared with the Trackster under study.
2532  float totalCPEnergyFromLayerCP = 0.f;
2533  if (maxCPId_byEnergy >= 0) {
2534  //Loop through all layers
2535  for (unsigned int j = 0; j < layers * 2; ++j) {
2536  totalCPEnergyFromLayerCP = totalCPEnergyFromLayerCP + cPOnLayer[maxCPId_byEnergy][j].energy;
2537  }
2538  energyFractionOfCPinTST = maxEnergySharedTSTandCP / totalCPEnergyFromLayerCP;
2539  if (tracksters[tstId].raw_energy() > 0.f) {
2540  energyFractionOfTSTinCP = maxEnergySharedTSTandCP / tracksters[tstId].raw_energy();
2541  }
2542  }
2543 
2544  LogDebug("HGCalValidator") << std::setw(12) << "Trackster"
2545  << "\t" //LogDebug("HGCalValidator")
2546  << std::setw(10) << "mulcl energy"
2547  << "\t" << std::setw(5) << "nhits"
2548  << "\t" << std::setw(12) << "noise hits"
2549  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
2550  << "\t" << std::setw(8) << "nhitsCP"
2551  << "\t" << std::setw(16) << "maxCPId_byEnergy"
2552  << "\t" << std::setw(23) << "maxEnergySharedTSTandCP"
2553  << "\t" << std::setw(22) << "totalCPEnergyFromAllLayerCP"
2554  << "\t" << std::setw(22) << "energyFractionOfTSTinCP"
2555  << "\t" << std::setw(25) << "energyFractionOfCPinTST"
2556  << "\t" << std::endl;
2557  LogDebug("HGCalValidator") << std::setw(12) << tstId << "\t" //LogDebug("HGCalValidator")
2558  << std::setw(10) << tracksters[tstId].raw_energy() << "\t" << std::setw(5)
2559  << numberOfHitsInTST << "\t" << std::setw(12) << numberOfNoiseHitsInTST << "\t"
2560  << std::setw(22) << maxCPId_byNumberOfHits << "\t" << std::setw(8)
2561  << maxCPNumberOfHitsInTST << "\t" << std::setw(16) << maxCPId_byEnergy << "\t"
2562  << std::setw(23) << maxEnergySharedTSTandCP << "\t" << std::setw(22)
2563  << totalCPEnergyFromLayerCP << "\t" << std::setw(22) << energyFractionOfTSTinCP << "\t"
2564  << std::setw(25) << energyFractionOfCPinTST << std::endl;
2565 
2566  } //end of loop through Tracksters
2567 
2568  //Loop through Tracksters
2569  for (unsigned int tstId = 0; tstId < nTracksters; ++tstId) {
2570  if (tracksters[tstId].vertices().empty())
2571  continue;
2572 
2573  // find the unique CaloParticle ids contributing to the Trackster
2574  //cpsInTrackster[trackster][CPids]
2575  std::sort(cpsInTrackster[tstId].begin(), cpsInTrackster[tstId].end());
2576  auto last = std::unique(cpsInTrackster[tstId].begin(), cpsInTrackster[tstId].end());
2577  cpsInTrackster[tstId].erase(last, cpsInTrackster[tstId].end());
2578 
2579  if (tracksters[tstId].raw_energy() == 0. && !cpsInTrackster[tstId].empty()) {
2580  //Loop through all CaloParticles contributing to Trackster tstId.
2581  for (auto& cpPair : cpsInTrackster[tstId]) {
2582  //In case of a Trackster with zero energy but related CaloParticles the score is set to 1.
2583  cpPair.second = 1.;
2584  LogDebug("HGCalValidator") << "Trackster Id: \t" << tstId << "\t CP id: \t" << cpPair.first << "\t score \t"
2585  << cpPair.second << std::endl;
2586  histograms.h_score_trackster2caloparticle[count]->Fill(cpPair.second);
2587  }
2588  continue;
2589  }
2590 
2591  const auto tst_hitsAndFractions = apply_LCMultiplicity(tracksters[tstId], layerClusters);
2592 
2593  // Compute the correct normalization
2594  float invTracksterEnergyWeight = 0.f;
2595  for (const auto& haf : tst_hitsAndFractions) {
2596  invTracksterEnergyWeight +=
2597  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2598  }
2599  invTracksterEnergyWeight = 1.f / invTracksterEnergyWeight;
2600 
2601  for (unsigned int i = 0; i < tst_hitsAndFractions.size(); ++i) {
2602  const auto rh_detid = tst_hitsAndFractions[i].first;
2603  const auto rhFraction = tst_hitsAndFractions[i].second;
2604  bool hitWithNoCP = false;
2605 
2606  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
2607  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
2608  hitWithNoCP = true;
2609  auto itcheck = hitMap.find(rh_detid);
2610  const HGCRecHit* hit = itcheck->second;
2611  float hitEnergyWeight = hit->energy() * hit->energy();
2612 
2613  for (auto& cpPair : cpsInTrackster[tstId]) {
2614  float cpFraction = 0.f;
2615  if (!hitWithNoCP) {
2616  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
2617  detIdToCaloParticleId_Map[rh_detid].end(),
2618  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
2619  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end()) {
2620  cpFraction = findHitIt->fraction;
2621  }
2622  }
2623  if (cpPair.second == FLT_MAX) {
2624  cpPair.second = 0.f;
2625  }
2626  cpPair.second +=
2627  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invTracksterEnergyWeight;
2628  }
2629  } //end of loop through rechits of trackster
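  // Illustrative summary (not in the original code) of the score just accumulated for each
  // CaloParticle cp linked to this Trackster, with E_hit the rechit energy and f_TST, f_CP
  // the (multiplicity-normalized) Trackster and compressed CaloParticle fractions on that cell:
  //
  //   score(TST, cp) = sum_hits (f_TST - f_CP)^2 * E_hit^2  /  sum_hits (f_TST * E_hit)^2
  //
  // A Trackster whose cells are fully and exclusively shared with cp gets score 0, while a
  // CaloParticle with no fraction on any Trackster cell gets exactly 1.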
2630 
2631  //In case of a Trackster with some energy but no related CaloParticles, print some info.
2632  if (cpsInTrackster[tstId].empty())
2633  LogDebug("HGCalValidator") << "Trackster Id: \t" << tstId << "\tCP id:\t-1 "
2634  << "\t score \t-1"
2635  << "\n";
2636 
2637  const auto score = std::min_element(std::begin(cpsInTrackster[tstId]),
2638  std::end(cpsInTrackster[tstId]),
2639  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2640  for (auto& cpPair : cpsInTrackster[tstId]) {
2641  LogDebug("HGCalValidator") << "Trackster Id: \t" << tstId << "\t CP id: \t" << cpPair.first << "\t score \t"
2642  << cpPair.second << std::endl;
2643  float sharedeneCPallLayers = 0.;
2644  for (unsigned int j = 0; j < layers * 2; ++j) {
2645  auto const& cp_linked = cPOnLayer[cpPair.first][j].layerClusterIdToEnergyAndScore[tstId];
2646  sharedeneCPallLayers += cp_linked.first;
2647  }
2648  LogDebug("HGCalValidator") << "sharedeneCPallLayers " << sharedeneCPallLayers << std::endl;
2649  if (cpPair.first == score->first) {
2650  histograms.h_score_trackster2caloparticle[count]->Fill(score->second);
2651  histograms.h_sharedenergy_trackster2caloparticle[count]->Fill(sharedeneCPallLayers /
2652  tracksters[tstId].raw_energy());
2653  histograms.h_energy_vs_score_trackster2caloparticle[count]->Fill(
2654  score->second, sharedeneCPallLayers / tracksters[tstId].raw_energy());
2655  }
2656  }
2657  auto assocFakeMerge = std::count_if(std::begin(cpsInTrackster[tstId]),
2658  std::end(cpsInTrackster[tstId]),
2659  [](const auto& obj) { return obj.second < ScoreCutTSTtoCPFakeMerge_; });
2660  tracksters_fakemerge[tstId] = assocFakeMerge;
2661  } //end of loop through Tracksters
2662 
2663  std::unordered_map<int, std::vector<float>> score3d;
2664  std::unordered_map<int, std::vector<float>> tstSharedEnergy;
2665  std::unordered_map<int, std::vector<float>> tstSharedEnergyFrac;
2666 
2667  for (unsigned int i = 0; i < nCaloParticles; ++i) {
2668  auto cpIndex = cPIndices[i];
2669  score3d[cpIndex].resize(nTracksters);
2670  tstSharedEnergy[cpIndex].resize(nTracksters);
2671  tstSharedEnergyFrac[cpIndex].resize(nTracksters);
2672  for (unsigned int j = 0; j < nTracksters; ++j) {
2673  score3d[cpIndex][j] = FLT_MAX;
2674  tstSharedEnergy[cpIndex][j] = 0.f;
2675  tstSharedEnergyFrac[cpIndex][j] = 0.f;
2676  }
2677  }
2678 
2679  // Here we fill the plots used to compute the different gen-level metrics,
2680  // namely efficiency and duplicate rate. In this loop we should restrict
2681  // ourselves to the selected CaloParticles.
2682  for (const auto& cpId : cPSelectedIndices) {
2683  //We need to keep the Trackster ids that are related to the
2684  //CaloParticle under study for the final filling of the score.
2685  std::vector<unsigned int> cpId_tstId_related;
2686  cpId_tstId_related.clear();
2687 
2688  float CPenergy = 0.f;
2689  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
2690  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
2691  //Below gives the CP energy related to Trackster per layer.
2692  CPenergy += cPOnLayer[cpId][layerId].energy;
2693  if (CPNumberOfHits == 0)
2694  continue;
2695  int tstWithMaxEnergyInCP = -1;
2696  //This is the maximum energy related to Trackster per layer.
2697  float maxEnergyTSTperlayerinCP = 0.f;
2698  float CPEnergyFractionInTSTperlayer = 0.f;
2699  //Remember, and do not be confused by the name: layerClusterIdToEnergyAndScore contains the Trackster id here.
2700  for (const auto& tst : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2701  if (tst.second.first > maxEnergyTSTperlayerinCP) {
2702  maxEnergyTSTperlayerinCP = tst.second.first;
2703  tstWithMaxEnergyInCP = tst.first;
2704  }
2705  }
2706  if (CPenergy > 0.f)
2707  CPEnergyFractionInTSTperlayer = maxEnergyTSTperlayerinCP / CPenergy;
2708 
2709  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
2710  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
2711  << "CPNhitsOnLayer\t" << std::setw(18) << "tstWithMaxEnergyInCP\t" << std::setw(15)
2712  << "maxEnergyTSTinCP\t" << std::setw(20) << "CPEnergyFractionInTST"
2713  << "\n";
2714  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
2715  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
2716  << CPNumberOfHits << "\t" << std::setw(18) << tstWithMaxEnergyInCP << "\t"
2717  << std::setw(15) << maxEnergyTSTperlayerinCP << "\t" << std::setw(20)
2718  << CPEnergyFractionInTSTperlayer << "\n";
2719 
2720  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
2721  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
2722  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
2723 
2724  bool hitWithNoTST = false;
2725  if (cpFraction == 0.f)
2726  continue; //hopefully this should never happen
2727  auto hit_find_in_TST = detIdToTracksterId_Map.find(cp_hitDetId);
2728  if (hit_find_in_TST == detIdToTracksterId_Map.end())
2729  hitWithNoTST = true;
2730  auto itcheck = hitMap.find(cp_hitDetId);
2731  const HGCRecHit* hit = itcheck->second;
2732  float hitEnergyWeight = hit->energy() * hit->energy();
2733  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2734  unsigned int tracksterId = lcPair.first;
2735  if (std::find(std::begin(cpId_tstId_related), std::end(cpId_tstId_related), tracksterId) ==
2736  std::end(cpId_tstId_related)) {
2737  cpId_tstId_related.push_back(tracksterId);
2738  }
2739  float tstFraction = 0.f;
2740 
2741  if (!hitWithNoTST) {
2742  auto findHitIt = std::find(detIdToTracksterId_Map[cp_hitDetId].begin(),
2743  detIdToTracksterId_Map[cp_hitDetId].end(),
2744  HGVHistoProducerAlgo::detIdInfoInTrackster{tracksterId, 0, 0.f});
2745  if (findHitIt != detIdToTracksterId_Map[cp_hitDetId].end())
2746  tstFraction = findHitIt->fraction;
2747  }
2748  //Observe here that we do not divide, as before, by the layer cluster energy weight. We should first sum
2749  //over all layers and then divide by the total CP energy weight over all layers.
2750  if (lcPair.second.second == FLT_MAX) {
2751  lcPair.second.second = 0.f;
2752  }
2753  lcPair.second.second += (tstFraction - cpFraction) * (tstFraction - cpFraction) * hitEnergyWeight;
2754  LogDebug("HGCalValidator") << "TracksterId:\t" << tracksterId << "\t"
2755  << "cpId:\t" << cpId << "\t"
2756  << "Layer: " << layerId << '\t' << "tstfraction,cpfraction:\t" << tstFraction
2757  << ", " << cpFraction << "\t"
2758  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
2759  << "added delta:\t"
2760  << (tstFraction - cpFraction) * (tstFraction - cpFraction) * hitEnergyWeight
2761  << "\t"
2762  << "currect score numerator:\t" << lcPair.second.second << "\t"
2763  << "shared Energy:\t" << lcPair.second.first << '\n';
2764  }
2765  } //end of loop through sim hits of current calo particle
2766 
2767  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
2768  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t TST id:\t-1 "
2769  << "\t layer \t " << layerId << " Sub score in \t -1"
2770  << "\n";
2771 
2772  for (const auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
2773  //3d score here without the denominator at this point
2774  if (score3d[cpId][lcPair.first] == FLT_MAX) {
2775  score3d[cpId][lcPair.first] = 0.f;
2776  }
2777  score3d[cpId][lcPair.first] += lcPair.second.second;
2778  tstSharedEnergy[cpId][lcPair.first] += lcPair.second.first;
2779  }
2780  } //end of loop through layers
2781 
2782  // Compute the correct normalization
2783  // We need to loop over the cPOnLayer data structure since this is the
2784  // only one that has the compressed information for multiple usages
2785  // of the same DetId by different SimClusters of a single CaloParticle.
2786  float invCPEnergyWeight = 0.f;
2787  for (const auto& layer : cPOnLayer[cpId]) {
2788  for (const auto& haf : layer.hits_and_fractions) {
2789  invCPEnergyWeight +=
2790  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
2791  }
2792  }
2793  invCPEnergyWeight = 1.f / invCPEnergyWeight;
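  // Illustrative summary (not in the original code): once the division by this weight is
  // applied further below, the CaloParticle-to-Trackster score mirrors the
  // Trackster-to-CaloParticle one, but summed over all layers and normalized to the
  // CaloParticle instead of the Trackster:
  //
  //   score(cp, TST) = sum_layers sum_hits (f_TST - f_CP)^2 * E_hit^2  /  sum_layers sum_hits (f_CP * E_hit)^2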
2794 
2795  //Loop through related Tracksters here
2796  //Will switch to vector for access because it is faster
2797  std::vector<int> cpId_tstId_related_vec(cpId_tstId_related.begin(), cpId_tstId_related.end());
2798  // In case the threshold to associate a CaloParticle to a Trackster is
2799  // below 50%, there could be cases in which the CP is linked to more than
2800  // one Trackster, leading to efficiencies > 1. This boolean is used to
2801  // avoid over-counting.
2802  bool cp_considered_efficient = false;
2803  for (unsigned int i = 0; i < cpId_tstId_related_vec.size(); ++i) {
2804  auto tstId = cpId_tstId_related_vec[i];
2805  //Now time for the denominator
2806  score3d[cpId][tstId] = score3d[cpId][tstId] * invCPEnergyWeight;
2807  tstSharedEnergyFrac[cpId][tstId] = (tstSharedEnergy[cpId][tstId] / CPenergy);
2808 
2809  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t TST id: \t" << tstId << "\t score \t" //
2810  << score3d[cpId][tstId] << "\t"
2811  << "invCPEnergyWeight \t" << invCPEnergyWeight << "\t"
2812  << "Trackste energy: \t" << tracksters[tstId].raw_energy() << "\t"
2813  << "shared energy:\t" << tstSharedEnergy[cpId][tstId] << "\t"
2814  << "shared energy fraction:\t" << tstSharedEnergyFrac[cpId][tstId] << "\n";
2815 
2816  histograms.h_score_caloparticle2trackster[count]->Fill(score3d[cpId][tstId]);
2817 
2818  histograms.h_sharedenergy_caloparticle2trackster[count]->Fill(tstSharedEnergyFrac[cpId][tstId]);
2819  histograms.h_energy_vs_score_caloparticle2trackster[count]->Fill(score3d[cpId][tstId],
2820  tstSharedEnergyFrac[cpId][tstId]);
2821  // Fill the numerator for the efficiency calculation. The efficiency is computed by considering the energy shared between a Trackster and a _corresponding_ caloParticle. The threshold is configurable via python.
2822  if (!cp_considered_efficient && tstSharedEnergyFrac[cpId][tstId] >= minTSTSharedEneFracEfficiency_) {
2823  cp_considered_efficient = true;
2824  histograms.h_numEff_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2825  histograms.h_numEff_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2826  }
2827  } //end of loop through Tracksters
2828 
2829  auto is_assoc = [&](const auto& v) -> bool { return v < ScoreCutCPtoTSTEffDup_; };
2830 
2831  auto assocDup = std::count_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2832 
2833  if (assocDup > 0) {
2834  histograms.h_num_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2835  histograms.h_num_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2836  auto best = std::min_element(std::begin(score3d[cpId]), std::end(score3d[cpId]));
2837  auto bestTstId = std::distance(std::begin(score3d[cpId]), best);
2838 
2839  histograms.h_sharedenergy_caloparticle2trackster_vs_eta[count]->Fill(
2840  cP[cpId].g4Tracks()[0].momentum().eta(), tracksters[bestTstId].raw_energy() / CPenergy);
2841  histograms.h_sharedenergy_caloparticle2trackster_vs_phi[count]->Fill(
2842  cP[cpId].g4Tracks()[0].momentum().phi(), tracksters[bestTstId].raw_energy() / CPenergy);
2843  LogDebug("HGCalValidator") << count << " " << cP[cpId].g4Tracks()[0].momentum().eta() << " "
2844  << cP[cpId].g4Tracks()[0].momentum().phi() << " " << tracksters[bestTstId].raw_energy()
2845  << " " << CPenergy << " " << (tracksters[bestTstId].raw_energy() / CPenergy) << " "
2846  << tstSharedEnergyFrac[cpId][bestTstId] << '\n';
2847  histograms.h_sharedenergy_caloparticle2trackster_assoc[count]->Fill(tstSharedEnergyFrac[cpId][bestTstId]);
2848  }
2849  if (assocDup >= 2) {
2850  auto match = std::find_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
2851  while (match != score3d[cpId].end()) {
2852  tracksters_duplicate[std::distance(std::begin(score3d[cpId]), match)] = 1;
2853  match = std::find_if(std::next(match), std::end(score3d[cpId]), is_assoc);
2854  }
2855  }
2856  histograms.h_denom_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
2857  histograms.h_denom_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
2858 
2859  } //end of loop through CaloParticles
2860 
2861  // Here we fill the plots used to compute the different reco-level metrics,
2862  // namely fake rate and merge rate. In this loop we should *not*
2863  // restrict ourselves to the selected CaloParticles.
2864  for (unsigned int tstId = 0; tstId < nTracksters; ++tstId) {
2865  if (tracksters[tstId].vertices().empty())
2866  continue;
2867  auto assocFakeMerge = tracksters_fakemerge[tstId];
2868  auto assocDuplicate = tracksters_duplicate[tstId];
2869  if (assocDuplicate) {
2870  histograms.h_numDup_trackster_eta[count]->Fill(tracksters[tstId].barycenter().eta());
2871  histograms.h_numDup_trackster_phi[count]->Fill(tracksters[tstId].barycenter().phi());
2872  }
2873  if (assocFakeMerge > 0) {
2874  histograms.h_num_trackster_eta[count]->Fill(tracksters[tstId].barycenter().eta());
2875  histograms.h_num_trackster_phi[count]->Fill(tracksters[tstId].barycenter().phi());
2876  auto best = std::min_element(std::begin(cpsInTrackster[tstId]),
2877  std::end(cpsInTrackster[tstId]),
2878  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
2879 
2880  //This is the shared energy with the best-matched CaloParticle, summed over all layers
2881  float sharedeneCPallLayers = 0.;
2882  //Loop through all layers
2883  for (unsigned int j = 0; j < layers * 2; ++j) {
2884  auto const& best_cp_linked = cPOnLayer[best->first][j].layerClusterIdToEnergyAndScore[tstId];
2885  sharedeneCPallLayers += best_cp_linked.first;
2886  } //end of loop through layers
2887  histograms.h_sharedenergy_trackster2caloparticle_vs_eta[count]->Fill(
2888  tracksters[tstId].barycenter().eta(), sharedeneCPallLayers / tracksters[tstId].raw_energy());
2889  histograms.h_sharedenergy_trackster2caloparticle_vs_phi[count]->Fill(
2890  tracksters[tstId].barycenter().phi(), sharedeneCPallLayers / tracksters[tstId].raw_energy());
2891  }
2892  if (assocFakeMerge >= 2) {
2893  histograms.h_numMerge_trackster_eta[count]->Fill(tracksters[tstId].barycenter().eta());
2894  histograms.h_numMerge_trackster_phi[count]->Fill(tracksters[tstId].barycenter().phi());
2895  }
2896  histograms.h_denom_trackster_eta[count]->Fill(tracksters[tstId].barycenter().eta());
2897  histograms.h_denom_trackster_phi[count]->Fill(tracksters[tstId].barycenter().phi());
2898  }
2899 }
2900 
2901 void HGVHistoProducerAlgo::fill_trackster_histos(const Histograms& histograms,
2902  int count,
2903  const ticl::TracksterCollection& tracksters,
2904  const reco::CaloClusterCollection& layerClusters,
2905  std::vector<CaloParticle> const& cP,
2906  std::vector<size_t> const& cPIndices,
2907  std::vector<size_t> const& cPSelectedIndices,
2908  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
2909  unsigned int layers) const {
2910  //Each event to be treated as two events:
2911  //an event in +ve endcap, plus another event in -ve endcap.
2912 
2913  //To keep track of total num of Tracksters
2914  int totNTstZm = 0; //-z
2915  int totNTstZp = 0; //+z
2916  //To count the number of Tracksters with 3 contiguous layers per event.
2917  int totNContTstZp = 0; //+z
2918  int totNContTstZm = 0; //-z
2919  //For the number of Tracksters without 3 contiguous layers per event.
2920  int totNNotContTstZp = 0; //+z
2921  int totNNotContTstZm = 0; //-z
2922  //We want to check below the score of cont and non cont Tracksters
2923  std::vector<bool> contTracksters;
2924  contTracksters.clear();
2925 
2926  //[tstId]-> vector of 2d layer clusters size
2927  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity;
2928  //[tstId]-> [layer][cluster size]
2929  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity_vs_layer;
2930  //We will need this for the scale text option
2931  // unsigned int totalLcInTsts = 0;
2932  // for (unsigned int tstId = 0; tstId < nTracksters; ++tstId) {
2933  // totalLcInTsts = totalLcInTsts + tracksters[tstId].vertices().size();
2934  // }
2935 
2936  auto nTracksters = tracksters.size();
2937  //loop through Tracksters of the event
2938  for (unsigned int tstId = 0; tstId < nTracksters; ++tstId) {
2939  auto nLayerClusters = tracksters[tstId].vertices().size();
2940 
2941  if (nLayerClusters == 0)
2942  continue;
2943 
2944  if (tracksters[tstId].barycenter().z() < 0.) {
2945  totNTstZm++;
2946  }
2947  if (tracksters[tstId].barycenter().z() > 0.) {
2948  totNTstZp++;
2949  }
2950 
2951  //Total number of layer clusters in Trackster
2952  int tnLcInTst = 0;
2953 
2954  //To keep track of the total number of layer clusters per Trackster per layer:
2955  //tnLcInTstperlay[layerid]
2956  std::vector<int> tnLcInTstperlay(1000, 0);
2957 
2958  //For the layers the Trackster extends over. Use a set because there would be many
2959  //duplicates, and then go back to a vector for faster random access.
2960  std::set<int> trackster_layers;
2961 
2962  bool tracksterInZplus = false;
2963  bool tracksterInZminus = false;
2964 
2965  //Loop through layer clusters
2966  for (const auto lcId : tracksters[tstId].vertices()) {
2967  //take the hits and their fraction of the specific layer cluster.
2968  const std::vector<std::pair<DetId, float>>& hits_and_fractions = layerClusters[lcId].hitsAndFractions();
2969 
2970  //For the multiplicity of the 2d layer clusters in Tracksters
2971  multiplicity[tstId].emplace_back(hits_and_fractions.size());
2972 
2973  const auto firstHitDetId = hits_and_fractions[0].first;
2974  //The layer that the layer cluster belongs to
2975  int layerid = recHitTools_->getLayerWithOffset(firstHitDetId) +
2976  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2977  trackster_layers.insert(layerid);
2978  multiplicity_vs_layer[tstId].emplace_back(layerid);
2979 
2980  tnLcInTstperlay[layerid]++;
2981  tnLcInTst++;
2982 
2983  if (recHitTools_->zside(firstHitDetId) > 0.) {
2984  tracksterInZplus = true;
2985  }
2986  if (recHitTools_->zside(firstHitDetId) < 0.) {
2987  tracksterInZminus = true;
2988  }
2989 
2990  } // end of loop through layerClusters
2991 
2992  // Per layer : Loop 0->99
2993  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2994  if (histograms.h_clusternum_in_trackster_perlayer[count].count(ilayer) && tnLcInTstperlay[ilayer] != 0) {
2995  histograms.h_clusternum_in_trackster_perlayer[count].at(ilayer)->Fill((float)tnLcInTstperlay[ilayer]);
2996  }
2997  // For the profile now of 2d layer cluster in Tracksters vs layer number.
2998  if (tnLcInTstperlay[ilayer] != 0) {
2999  histograms.h_clusternum_in_trackster_vs_layer[count]->Fill((float)ilayer, (float)tnLcInTstperlay[ilayer]);
3000  }
3001  } // end of loop over layers
3002 
3003  // Looking for Tracksters with 3 contiguous layers per event.
3004  std::vector<int> trackster_layers_vec(trackster_layers.begin(), trackster_layers.end());
3005  // Since we want to also check for non contiguous Tracksters
3006  bool contiTrackster = false;
3007  //Observe that we start from 1 and go up to size - 1 element.
3008  if (trackster_layers_vec.size() >= 3) {
3009  for (unsigned int i = 1; i < trackster_layers_vec.size() - 1; ++i) {
3010  if ((trackster_layers_vec[i - 1] + 1 == trackster_layers_vec[i]) &&
3011  (trackster_layers_vec[i + 1] - 1 == trackster_layers_vec[i])) {
3012  //So, this is a Trackster with 3 contiguous layers per event
3013  if (tracksterInZplus) {
3014  totNContTstZp++;
3015  }
3016  if (tracksterInZminus) {
3017  totNContTstZm++;
3018  }
3019  contiTrackster = true;
3020  break;
3021  }
3022  }
3023  }
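  // Illustrative example (not in the original code) of the contiguity check above, with
  // made-up layer sets:
  //
  //   std::set<int> a{7, 8, 9, 14};   // 7,8,9 are consecutive  -> contiTrackster = true
  //   std::set<int> b{7, 9, 11, 13};  // no 3 consecutive layers -> contiTrackster = false
  //
  // A single triplet of consecutive layers is enough, hence the break once it is found.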
3024  // Count non contiguous Tracksters
3025  if (!contiTrackster) {
3026  if (tracksterInZplus) {
3027  totNNotContTstZp++;
3028  }
3029  if (tracksterInZminus) {
3030  totNNotContTstZm++;
3031  }
3032  }
3033 
3034  // Save for the score
3035  contTracksters.push_back(contiTrackster);
3036 
3037  histograms.h_clusternum_in_trackster[count]->Fill(tnLcInTst);
3038 
3039  for (unsigned int lc = 0; lc < multiplicity[tstId].size(); ++lc) {
3040  //multiplicity of the current LC
3041  float mlp = std::count(std::begin(multiplicity[tstId]), std::end(multiplicity[tstId]), multiplicity[tstId][lc]);
3042  //LogDebug("HGCalValidator") << "mlp %" << (100. * mlp)/ ((float) nLayerClusters) << std::endl;
3043  // histograms.h_multiplicityOfLCinTST[count]->Fill( mlp , multiplicity[tstId][lc] , 100. / (float) totalLcInTsts );
3044  histograms.h_multiplicityOfLCinTST[count]->Fill(mlp, multiplicity[tstId][lc]);
3045  //When we plot with the text option we want the entries to be the same
3046  //as the % of the current cell over the whole number of layerClusters. For this we need an extra histogram.
3047  histograms.h_multiplicity_numberOfEventsHistogram[count]->Fill(mlp);
3048  //For the cluster multiplicity vs layer
3049  //First with the -z endcap (V10:0->49)
3050  if (multiplicity_vs_layer[tstId][lc] < layers) {
3051  histograms.h_multiplicityOfLCinTST_vs_layercluster_zminus[count]->Fill(mlp, multiplicity_vs_layer[tstId][lc]);
3052  histograms.h_multiplicity_zminus_numberOfEventsHistogram[count]->Fill(mlp);
3053  } else { //Then for the +z (V10:50->99)
3054  histograms.h_multiplicityOfLCinTST_vs_layercluster_zplus[count]->Fill(
3055  mlp, multiplicity_vs_layer[tstId][lc] - layers);
3056  histograms.h_multiplicity_zplus_numberOfEventsHistogram[count]->Fill(mlp);
3057  }
3058  //For the cluster multiplicity vs cluster energy
3059  histograms.h_multiplicityOfLCinTST_vs_layerclusterenergy[count]->Fill(
3060  mlp, layerClusters[tracksters[tstId].vertices(lc)].energy());
3061  }
3062 
3063  if (!trackster_layers.empty()) {
3064  histograms.h_trackster_x[count]->Fill(tracksters[tstId].barycenter().x());
3065  histograms.h_trackster_y[count]->Fill(tracksters[tstId].barycenter().y());
3066  histograms.h_trackster_z[count]->Fill(tracksters[tstId].barycenter().z());
3067  histograms.h_trackster_eta[count]->Fill(tracksters[tstId].barycenter().eta());
3068  histograms.h_trackster_phi[count]->Fill(tracksters[tstId].barycenter().phi());
3069 
3070  histograms.h_trackster_firstlayer[count]->Fill((float)*trackster_layers.begin());
3071  histograms.h_trackster_lastlayer[count]->Fill((float)*trackster_layers.rbegin());
3072  histograms.h_trackster_layersnum[count]->Fill((float)trackster_layers.size());
3073 
3074  histograms.h_trackster_pt[count]->Fill(tracksters[tstId].raw_pt());
3075 
3076  histograms.h_trackster_energy[count]->Fill(tracksters[tstId].raw_energy());
3077  }
3078 
3079  } //end of loop through Tracksters
3080 
3081  histograms.h_tracksternum[count]->Fill(totNTstZm + totNTstZp);
3082  histograms.h_conttracksternum[count]->Fill(totNContTstZp + totNContTstZm);
3083  histograms.h_nonconttracksternum[count]->Fill(totNNotContTstZp + totNNotContTstZm);
3084 
3085  tracksters_to_CaloParticles(
3086  histograms, count, tracksters, layerClusters, cP, cPIndices, cPSelectedIndices, hitMap, layers);
3087 }
3088 
3089 double HGVHistoProducerAlgo::distance2(const double x1,
3090  const double y1,
3091  const double x2,
3092  const double y2) const { //distance squared
3093  const double dx = x1 - x2;
3094  const double dy = y1 - y2;
3095  return (dx * dx + dy * dy);
3096 } //distance squared
3097 double HGVHistoProducerAlgo::distance(const double x1,
3098  const double y1,
3099  const double x2,
3100  const double y2) const { //2-d distance on the layer (x-y)
3101  return std::sqrt(distance2(x1, y1, x2, y2));
3102 }
3103 
3104 void HGVHistoProducerAlgo::setRecHitTools(std::shared_ptr<hgcal::RecHitTools> recHitTools) {
3105  recHitTools_ = recHitTools;
3106 }
3107 
3108 DetId HGVHistoProducerAlgo::findmaxhit(const reco::CaloCluster& cluster,
3109  std::unordered_map<DetId, const HGCRecHit*> const& hitMap) const {
3110  DetId themaxid;
3111  const std::vector<std::pair<DetId, float>>& hits_and_fractions = cluster.hitsAndFractions();
3112 
3113  double maxene = 0.;
3114  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
3115  it_haf != hits_and_fractions.end();
3116  ++it_haf) {
3117  DetId rh_detid = it_haf->first;
3118 
3119  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
3120  const HGCRecHit* hit = itcheck->second;
3121 
3122  if (maxene < hit->energy()) {
3123  maxene = hit->energy();
3124  themaxid = rh_detid;
3125  }
3126  }
3127 
3128  return themaxid;
3129 }
3130 
3131 double HGVHistoProducerAlgo::getEta(double eta) const {
3132  if (useFabsEta_)
3133  return fabs(eta);
3134  else
3135  return eta;
3136 }
HGVHistoProducerAlgo::bookSimClusterAssociationHistos
void bookSimClusterAssociationHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers, std::vector< int > thicknesses)
Definition: HGVHistoProducerAlgo.cc:338
HGVHistoProducerAlgo::nintLongDepBary_
int nintLongDepBary_
Definition: HGVHistoProducerAlgo.h:362
HGVHistoProducerAlgo::minTotNsimClsperthick_
double minTotNsimClsperthick_
Definition: HGVHistoProducerAlgo.h:378
HGVHistoProducerAlgo::maxEta_
double maxEta_
Definition: HGVHistoProducerAlgo.h:346
DDAxes::y
HGVHistoProducerAlgo::bookInfo
void bookInfo(DQMStore::IBooker &ibook, Histograms &histograms)
Definition: HGVHistoProducerAlgo.cc:204
HGVHistoProducerAlgo::maxTSTSharedEneFrac_
double maxTSTSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:376
HGVHistoProducerAlgo::maxY_
double maxY_
Definition: HGVHistoProducerAlgo.h:412
electrons_cff.bool
bool
Definition: electrons_cff.py:366
HGVHistoProducerAlgo::nintTotNcellsperthickperlayer_
int nintTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:383
edm::AssociationMap::find
const_iterator find(const key_type &k) const
find element with specified reference key
Definition: AssociationMap.h:173
mps_fire.i
i
Definition: mps_fire.py:428
HGVHistoProducerAlgo::maxClEnepermultiplicity_
double maxClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:408
HGVHistoProducerAlgo::nintDisSeedToMaxperthickperlayer_
int nintDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:393
MessageLogger.h
HGVHistoProducerAlgo::minMixedHitsCluster_
double minMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:357
ScoreCutTSTtoCPFakeMerge_
const double ScoreCutTSTtoCPFakeMerge_
Definition: HGVHistoProducerAlgo.cc:19
HGVHistoProducerAlgo::useFabsEta_
bool useFabsEta_
Definition: HGVHistoProducerAlgo.h:348
HGVHistoProducerAlgo::minZ_
double minZ_
Definition: HGVHistoProducerAlgo.h:414
HGVHistoProducerAlgo::bookClusterHistos_ClusterLevel
void bookClusterHistos_ClusterLevel(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers, std::vector< int > thicknesses, std::string pathtomatbudfile)
Definition: HGVHistoProducerAlgo.cc:595
CaloParticle::eta
float eta() const
Momentum pseudorapidity. Note this is taken from the simtrack before the calorimeter.
Definition: CaloParticle.h:142
HGVHistoProducerAlgo::nintTotNClsinTSTsperlayer_
int nintTotNClsinTSTsperlayer_
Definition: HGVHistoProducerAlgo.h:403
HGVHistoProducerAlgo::maxMixedHitsCluster_
double maxMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:357
HGVHistoProducerAlgo::minTSTSharedEneFracEfficiency_
double minTSTSharedEneFracEfficiency_
Definition: HGVHistoProducerAlgo.h:375
HGVHistoProducerAlgo::nintClEnepermultiplicity_
int nintClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:409
HGVHistoProducerAlgo::nintPhi_
int nintPhi_
Definition: HGVHistoProducerAlgo.h:354
HGVHistoProducerAlgo::minTotNTSTs_
double minTotNTSTs_
Definition: HGVHistoProducerAlgo.h:398
HGVHistoProducerAlgo::fill_generic_cluster_histos
void fill_generic_cluster_histos(const Histograms &histograms, int count, edm::Handle< reco::CaloClusterCollection > clusterHandle, const reco::CaloClusterCollection &clusters, const Density &densities, edm::Handle< std::vector< CaloParticle >> caloParticleHandle, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, std::map< double, double > cummatbudg, unsigned int layers, std::vector< int > thicknesses, const hgcal::RecoToSimCollection &recSimColl, const hgcal::SimToRecoCollection &simRecColl) const
Definition: HGVHistoProducerAlgo.cc:1944
HGVHistoProducerAlgo::maxEneClperlay_
double maxEneClperlay_
Definition: HGVHistoProducerAlgo.h:369
HGVHistoProducerAlgo::bookCaloParticleHistos
void bookCaloParticleHistos(DQMStore::IBooker &ibook, Histograms &histograms, int pdgid, unsigned int layers)
Definition: HGVHistoProducerAlgo.cc:213
f
double f[11][100]
Definition: MuScleFitUtils.cc:78
HGVHistoProducerAlgo::nintZpos_
int nintZpos_
Definition: HGVHistoProducerAlgo.h:364
HGVHistoProducerAlgo::minDisToMaxperthickperlayer_
double minDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:388
min
T min(T a, T b)
Definition: MathUtil.h:58
ecaldqm::zside
int zside(DetId const &)
Definition: EcalDQMCommonUtils.cc:189
HGVHistoProducerAlgo::tracksters_to_CaloParticles
void tracksters_to_CaloParticles(const Histograms &histograms, int count, const ticl::TracksterCollection &Tracksters, const reco::CaloClusterCollection &layerClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned int layers) const
Definition: HGVHistoProducerAlgo.cc:2271
HGVHistoProducerAlgo::maxTotNsimClsperlay_
double maxTotNsimClsperlay_
Definition: HGVHistoProducerAlgo.h:365
testProducerWithPsetDescEmpty_cfi.x2
x2
Definition: testProducerWithPsetDescEmpty_cfi.py:28
HGVHistoProducerAlgo::minX_
double minX_
Definition: HGVHistoProducerAlgo.h:410
DetId::det
constexpr Detector det() const
get the detector field from this detid
Definition: DetId.h:46
HGVHistoProducerAlgo::bookClusterHistos_LCtoCP_association
void bookClusterHistos_LCtoCP_association(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers)
Definition: HGVHistoProducerAlgo.cc:692
HGVHistoProducerAlgo::layerClusters_to_CaloParticles
void layerClusters_to_CaloParticles(const Histograms &histograms, edm::Handle< reco::CaloClusterCollection > clusterHandle, const reco::CaloClusterCollection &clusters, edm::Handle< std::vector< CaloParticle >> caloParticleHandle, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned int layers, const hgcal::RecoToSimCollection &recSimColl, const hgcal::SimToRecoCollection &simRecColl) const
Definition: HGVHistoProducerAlgo.cc:1494
HGVHistoProducerAlgo::~HGVHistoProducerAlgo
~HGVHistoProducerAlgo()
Definition: HGVHistoProducerAlgo.cc:202
HGVHistoProducerAlgo::maxTotNClsperthick_
double maxTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:380
HGVHistoProducerAlgo::nintEne_
int nintEne_
Definition: HGVHistoProducerAlgo.h:350
FastTrackerRecHitCombiner_cfi.simHits
simHits
Definition: FastTrackerRecHitCombiner_cfi.py:5
HGVHistoProducerAlgo::HGVHistoProducerAlgo
HGVHistoProducerAlgo(const edm::ParameterSet &pset)
Definition: HGVHistoProducerAlgo.cc:22
CaloParticle::g4Tracks
const std::vector< SimTrack > & g4Tracks() const
Definition: CaloParticle.h:74
HGVHistoProducerAlgo::minTotNClsinTSTsperlayer_
double minTotNClsinTSTsperlayer_
Definition: HGVHistoProducerAlgo.h:402
CaloParticle::energy
float energy() const
Energy. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:98
DDAxes::x
edm::RefVector< SimClusterCollection >
ScoreCutCPtoTSTEffDup_
const double ScoreCutCPtoTSTEffDup_
Definition: HGVHistoProducerAlgo.cc:20
SimCluster
Monte Carlo truth information used for tracking validation.
Definition: SimCluster.h:29
HGVHistoProducerAlgo::nintTotNsimClsperthick_
int nintTotNsimClsperthick_
Definition: HGVHistoProducerAlgo.h:379
findQualityFiles.v
v
Definition: findQualityFiles.py:179
ScoreCutLCtoCP_
const double ScoreCutLCtoCP_
Definition: HGVHistoProducerAlgo.cc:15
HGVHistoProducerAlgo::minEneClperlay_
double minEneClperlay_
Definition: HGVHistoProducerAlgo.h:369
spr::find
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
newFWLiteAna.found
found
Definition: newFWLiteAna.py:118
edm::Handle< reco::CaloClusterCollection >
AlignmentTracksFromVertexSelector_cfi.vertices
vertices
Definition: AlignmentTracksFromVertexSelector_cfi.py:5
HGVHistoProducerAlgo::nintTotNsimClsperlay_
int nintTotNsimClsperlay_
Definition: HGVHistoProducerAlgo.h:366
edm::Ref
Definition: AssociativeIterator.h:58
HGVHistoProducerAlgo::nintDisToMaxperthickperlayerenewei_
int nintDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:391
heavyIonCSV_trainingSettings.idx
idx
Definition: heavyIonCSV_trainingSettings.py:5
HGVHistoProducerAlgo::fill_caloparticle_histos
void fill_caloparticle_histos(const Histograms &histograms, int pdgid, const CaloParticle &caloparticle, std::vector< SimVertex > const &simVertices, unsigned int layers, std::unordered_map< DetId, const HGCRecHit * > const &) const
Definition: HGVHistoProducerAlgo.cc:1213
trackingPlots.assoc
assoc
Definition: trackingPlots.py:183
HGVHistoProducerAlgo::nintEneCl_
int nintEneCl_
Definition: HGVHistoProducerAlgo.h:360
HGVHistoProducerAlgo::maxTotNClsinTSTsperlayer_
double maxTotNClsinTSTsperlayer_
Definition: HGVHistoProducerAlgo.h:402
HGVHistoProducerAlgo::minTotNsimClsperlay_
double minTotNsimClsperlay_
Definition: HGVHistoProducerAlgo.h:365
HGVHistoProducerAlgo::maxDisToMaxperthickperlayerenewei_
double maxDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:390
HGVHistoProducerAlgo::maxEneCl_
double maxEneCl_
Definition: HGVHistoProducerAlgo.h:359
DetId
Definition: DetId.h:17
HGVHistoProducerAlgo::setRecHitTools
void setRecHitTools(std::shared_ptr< hgcal::RecHitTools > recHitTools)
Definition: HGVHistoProducerAlgo.cc:3104
HGVHistoProducerAlgo::minLongDepBary_
double minLongDepBary_
Definition: HGVHistoProducerAlgo.h:361
edm::AssociationMap::end
const_iterator end() const
last iterator over the map (read only)
Definition: AssociationMap.h:171
HGVHistoProducerAlgo::maxZpos_
double maxZpos_
Definition: HGVHistoProducerAlgo.h:363
DetId::HGCalHSi
Definition: DetId.h:33
DetId::HGCalEE
Definition: DetId.h:32
testProducerWithPsetDescEmpty_cfi.x1
x1
Definition: testProducerWithPsetDescEmpty_cfi.py:33
dqmdumpme.last
last
Definition: dqmdumpme.py:56
testProducerWithPsetDescEmpty_cfi.y1
y1
Definition: testProducerWithPsetDescEmpty_cfi.py:29
ScoreCutSCtoLC_
const double ScoreCutSCtoLC_
Definition: HGVHistoProducerAlgo.cc:18
HGVHistoProducerAlgo.h
SimCluster.h
h
HGVHistoProducerAlgo::maxTotNTSTs_
double maxTotNTSTs_
Definition: HGVHistoProducerAlgo.h:398
HGVHistoProducerAlgo::maxMplofLCs_
double maxMplofLCs_
Definition: HGVHistoProducerAlgo.h:404
HLT_FULL_cff.fraction
fraction
Definition: HLT_FULL_cff.py:52823
HGVHistoProducerAlgo::nintDisToSeedperthickperlayer_
int nintDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:385
HGVHistoProducerAlgo::minEneCl_
double minEneCl_
Definition: HGVHistoProducerAlgo.h:359
Calorimetry_cff.thickness
thickness
Definition: Calorimetry_cff.py:115
PVValHelper::eta
Definition: PVValidationHelpers.h:70
HGVHistoProducerAlgo::maxLongDepBary_
double maxLongDepBary_
Definition: HGVHistoProducerAlgo.h:361
HGVHistoProducerAlgo::minMixedHitsSimCluster_
double minMixedHitsSimCluster_
Definition: HGVHistoProducerAlgo.h:355
HGVHistoProducerAlgo::maxTotNsimClsperthick_
double maxTotNsimClsperthick_
Definition: HGVHistoProducerAlgo.h:378
reco::CaloCluster
Definition: CaloCluster.h:31
mathSSE::sqrt
T sqrt(T t)
Definition: SSEVec.h:19
reco::CaloClusterCollection
std::vector< CaloCluster > CaloClusterCollection
collection of CaloCluster objects
Definition: CaloClusterFwd.h:19
mps_fire.end
end
Definition: mps_fire.py:242
DDAxes::z
HGVHistoProducerAlgo::nintMixedHitsSimCluster_
int nintMixedHitsSimCluster_
Definition: HGVHistoProducerAlgo.h:356
HGVHistoProducerAlgo::nintMplofLCs_
int nintMplofLCs_
Definition: HGVHistoProducerAlgo.h:405
dqm::implementation::IBooker::bookProfile
MonitorElement * bookProfile(TString const &name, TString const &title, int nchX, double lowX, double highX, int, double lowY, double highY, char const *option="s", FUNC onbooking=NOOP())
Definition: DQMStore.h:322
HGVHistoProducerAlgo::maxDisToSeedperthickperlayerenewei_
double maxDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:386
CaloParticle::phi
float phi() const
Momentum azimuthal angle. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:134
submitPVResolutionJobs.count
count
Definition: submitPVResolutionJobs.py:352
ScoreCutLCtoSC_
const double ScoreCutLCtoSC_
Definition: HGVHistoProducerAlgo.cc:17
HCALHighEnergyHPDFilter_cfi.energy
energy
Definition: HCALHighEnergyHPDFilter_cfi.py:5
HGVHistoProducerAlgo::getEta
double getEta(double eta) const
Definition: HGVHistoProducerAlgo.cc:3131
HGVHistoProducerAlgo::minClEneperthickperlayer_
double minClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:394
HGVHistoProducerAlgo::minClEnepermultiplicity_
double minClEnepermultiplicity_
Definition: HGVHistoProducerAlgo.h:408
HLTEgPhaseIITestSequence_cff.layerClusters
layerClusters
Definition: HLTEgPhaseIITestSequence_cff.py:2506
HGVHistoProducerAlgo::nintZ_
int nintZ_
Definition: HGVHistoProducerAlgo.h:415
HGVHistoProducerAlgo::nintPt_
int nintPt_
Definition: HGVHistoProducerAlgo.h:352
HGVHistoProducerAlgo::nintTotNClsperlay_
int nintTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:368
HGVHistoProducerAlgo::nintSharedEneFrac_
int nintSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:374
HGVHistoProducerAlgo::maxPt_
double maxPt_
Definition: HGVHistoProducerAlgo.h:351
HGVHistoProducerAlgo::minSharedEneFrac_
double minSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:373
getGTfromDQMFile.obj
obj
Definition: getGTfromDQMFile.py:32
HGVHistoProducerAlgo::nintScore_
int nintScore_
Definition: HGVHistoProducerAlgo.h:372
HGVHistoProducerAlgo::fill_cluster_histos
void fill_cluster_histos(const Histograms &histograms, int count, const reco::CaloCluster &cluster) const
Definition: HGVHistoProducerAlgo.cc:1487
HGVHistoProducerAlgo::maxDisToSeedperthickperlayer_
double maxDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:384
HGVHistoProducerAlgo::recHitTools_
std::shared_ptr< hgcal::RecHitTools > recHitTools_
Definition: HGVHistoProducerAlgo.h:343
phase1PixelTopology::layer
constexpr std::array< uint8_t, layerIndexSize > layer
Definition: phase1PixelTopology.h:99
HGVHistoProducerAlgo::minDisToSeedperthickperlayer_
double minDisToSeedperthickperlayer_
Definition: HGVHistoProducerAlgo.h:384
HGCRecHit
Definition: HGCRecHit.h:14
HGVHistoProducerAlgo::distance
double distance(const double x1, const double y1, const double x2, const double y2) const
Definition: HGVHistoProducerAlgo.cc:3097
HGVHistoProducerAlgo::nintCellsEneDensperthick_
int nintCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:397
HGVHistoProducerAlgo::minPhi_
double minPhi_
Definition: HGVHistoProducerAlgo.h:353
bsc_activity_cfg.clusters
clusters
Definition: bsc_activity_cfg.py:36
testProducerWithPsetDescEmpty_cfi.y2
y2
Definition: testProducerWithPsetDescEmpty_cfi.py:30
edm::helpers::KeyVal::val
V val
Definition: AssociationMapHelpers.h:33
LogDebug
#define LogDebug(id)
Definition: MessageLogger.h:233
edm::ParameterSet
Definition: ParameterSet.h:47
HGVHistoProducerAlgo::maxClEneperthickperlayer_
double maxClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:394
CaloParticle
Definition: CaloParticle.h:16
AlCaHLTBitMon_ParallelJobs.p
def p
Definition: AlCaHLTBitMon_ParallelJobs.py:153
reco::CaloCluster::eta
double eta() const
pseudorapidity of cluster centroid
Definition: CaloCluster.h:181
reco::CaloCluster::hitsAndFractions
const std::vector< std::pair< DetId, float > > & hitsAndFractions() const
Definition: CaloCluster.h:210
SiStripPI::max
Definition: SiStripPayloadInspectorHelper.h:169
HGVHistoProducerAlgo::maxSizeCLsinTSTs_
double maxSizeCLsinTSTs_
Definition: HGVHistoProducerAlgo.h:406
edm::AssociationMap
Definition: AssociationMap.h:48
jetUpdater_cfi.sort
sort
Definition: jetUpdater_cfi.py:29
HGVHistoProducerAlgo::nintEneClperlay_
int nintEneClperlay_
Definition: HGVHistoProducerAlgo.h:370
match
std::pair< typename Association::data_type::first_type, double > match(Reference key, Association association, bool bestMatchByMaxValue)
Generic matching function.
Definition: Utils.h:10
HGVHistoProducerAlgo::maxZ_
double maxZ_
Definition: HGVHistoProducerAlgo.h:414
ScoreCutCPtoLC_
const double ScoreCutCPtoLC_
Definition: HGVHistoProducerAlgo.cc:16
position
static int position[264][3]
Definition: ReadPGInfo.cc:289
dqm::implementation::IBooker::bookInt
MonitorElement * bookInt(TString const &name, FUNC onbooking=NOOP())
Definition: DQMStore.h:73
HGVHistoProducerAlgo::nintSizeCLsinTSTs_
int nintSizeCLsinTSTs_
Definition: HGVHistoProducerAlgo.h:407
HGVHistoProducerAlgo::nintClEneperthickperlayer_
int nintClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:395
HGVHistoProducerAlgo::maxSharedEneFrac_
double maxSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:373
HGVHistoProducerAlgo::detIdInfoInCluster
Definition: HGVHistoProducerAlgo.h:320
createfilelist.int
int
Definition: createfilelist.py:10
HGVHistoProducerAlgo::minEta_
double minEta_
Definition: HGVHistoProducerAlgo.h:346
HGVHistoProducerAlgo::maxPhi_
double maxPhi_
Definition: HGVHistoProducerAlgo.h:353
trackerHitRTTI::vector
Definition: trackerHitRTTI.h:21
HGVHistoProducerAlgo::maxX_
double maxX_
Definition: HGVHistoProducerAlgo.h:410
PVValHelper::dy
Definition: PVValidationHelpers.h:50
ticl::Trackster::vertex_multiplicity
std::vector< float > & vertex_multiplicity()
Definition: Trackster.h:57
histograms
Definition: histograms.py:1
HGVHistoProducerAlgo::minPt_
double minPt_
Definition: HGVHistoProducerAlgo.h:351
HGVHistoProducerAlgo::nintDisToMaxperthickperlayer_
int nintDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:389
CaloParticle::pt
float pt() const
Transverse momentum. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:130
AlCaHLTBitMon_QueryRunRegistry.string
string string
Definition: AlCaHLTBitMon_QueryRunRegistry.py:256
HGVHistoProducerAlgo::nintTotNClsperthick_
int nintTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:381
HGVHistoProducerAlgo::nintMixedHitsCluster_
int nintMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:358
HGVHistoProducerAlgo::nintTotNClsinTSTs_
int nintTotNClsinTSTs_
Definition: HGVHistoProducerAlgo.h:401
HGVHistoProducerAlgo::maxTotNClsperlay_
double maxTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:367
HGVHistoProducerAlgo::minY_
double minY_
Definition: HGVHistoProducerAlgo.h:412
HGVHistoProducerAlgo::bookTracksterHistos
void bookTracksterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers)
Definition: HGVHistoProducerAlgo.cc:967
DDAxes::phi
HGVHistoProducerAlgo::fill_trackster_histos
void fill_trackster_histos(const Histograms &histograms, int count, const ticl::TracksterCollection &Tracksters, const reco::CaloClusterCollection &layerClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned int layers) const
Definition: HGVHistoProducerAlgo.cc:2901
HGVHistoProducerAlgo::maxTotNcellsperthickperlayer_
double maxTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:382
Histograms
Definition: Histograms.h:38
HGCalDetId
Definition: HGCalDetId.h:8
eostools.move
def move(src, dest)
Definition: eostools.py:511
std
Definition: JetResolutionObject.h:76
DetId::rawId
constexpr uint32_t rawId() const
get the raw id
Definition: DetId.h:57
HGVHistoProducerAlgo::maxDisSeedToMaxperthickperlayer_
double maxDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:392
HGVHistoProducerAlgo::nintEta_
int nintEta_
Definition: HGVHistoProducerAlgo.h:347
ticl::TracksterCollection
std::vector< Trackster > TracksterCollection
Definition: Trackster.h:203
HGVHistoProducerAlgo::maxCellsEneDensperthick_
double maxCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:396
tier0.unique
def unique(seq, keepstr=True)
Definition: tier0.py:24
HGVHistoProducerAlgo::nintY_
int nintY_
Definition: HGVHistoProducerAlgo.h:413
DetId::HGCalHSc
Definition: DetId.h:34
HGVHistoProducerAlgo::minTotNClsperlay_
double minTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:367
HGVHistoProducerAlgo::minDisToMaxperthickperlayerenewei_
double minDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:390
dqm::implementation::IBooker::book2D
MonitorElement * book2D(TString const &name, TString const &title, int nchX, double lowX, double highX, int nchY, double lowY, double highY, FUNC onbooking=NOOP())
Definition: DQMStore.h:177
relativeConstraints.empty
bool empty
Definition: relativeConstraints.py:46
ticl::Trackster::vertices
std::vector< unsigned int > & vertices()
Definition: Trackster.h:56
HGVHistoProducerAlgo::minCellsEneDensperthick_: double minCellsEneDensperthick_ (Definition: HGVHistoProducerAlgo.h:396)
HGVHistoProducerAlgo::minZpos_: double minZpos_ (Definition: HGVHistoProducerAlgo.h:363)
HGVHistoProducerAlgo::minEne_: double minEne_ (Definition: HGVHistoProducerAlgo.h:349)
HGVHistoProducerAlgo::findmaxhit: DetId findmaxhit(const reco::CaloCluster &cluster, std::unordered_map< DetId, const HGCRecHit * > const &) const (Definition: HGVHistoProducerAlgo.cc:3108)
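findmaxhit returns the DetId of the cluster hit with the largest rechit energy. A sketch of that lookup pattern using the same map type as in the signature above; the helper name is hypothetical and the body is not copied from HGVHistoProducerAlgo.cc:3108:

// Sketch: scan the cluster's hits, look each DetId up in the rechit map and
// keep the DetId of the highest-energy hit found there.
DetId findMaxEnergyHit(const reco::CaloCluster& cluster,
                       std::unordered_map<DetId, const HGCRecHit*> const& hitMap) {
  DetId maxId;
  float maxEnergy = -1.f;
  for (const auto& hitAndFrac : cluster.hitsAndFractions()) {
    auto it = hitMap.find(hitAndFrac.first);
    if (it == hitMap.end())
      continue;  // hit not present in the map (e.g. masked)
    if (it->second->energy() > maxEnergy) {
      maxEnergy = it->second->energy();
      maxId = hitAndFrac.first;
    }
  }
  return maxId;
}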
HGVHistoProducerAlgo::layerClusters_to_SimClusters: void layerClusters_to_SimClusters(const Histograms &histograms, int count, edm::Handle< reco::CaloClusterCollection > clusterHandle, const reco::CaloClusterCollection &clusters, edm::Handle< std::vector< SimCluster >> simClusterHandle, std::vector< SimCluster > const &simClusters, std::vector< size_t > const &sCIndices, const std::vector< float > &mask, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned int layers, const hgcal::RecoToSimCollectionWithSimClusters &recSimColl, const hgcal::SimToRecoCollectionWithSimClusters &simRecColl) const (Definition: HGVHistoProducerAlgo.cc:1761)
HGVHistoProducerAlgo::minTotNcellsperthickperlayer_: double minTotNcellsperthickperlayer_ (Definition: HGVHistoProducerAlgo.h:382)
HGVHistoProducerAlgo::nintTotNTSTs_: int nintTotNTSTs_ (Definition: HGVHistoProducerAlgo.h:399)
dqm::implementation::IBooker (Definition: DQMStore.h:43)
HGVHistoProducerAlgo::distance2: double distance2(const double x1, const double y1, const double x2, const double y2) const (Definition: HGVHistoProducerAlgo.cc:3089)
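distance2 is the squared 2D distance, which is enough when only relative comparisons are needed and lets callers skip the square root. A free-function sketch consistent with the declaration above (not copied from HGVHistoProducerAlgo.cc:3089):

// Squared 2D distance; comparing squared distances avoids the sqrt.
double distance2(const double x1, const double y1, const double x2, const double y2) {
  const double dx = x1 - x2;
  const double dy = y1 - y2;
  return dx * dx + dy * dy;
}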
SimCluster::hits_and_fractions: std::vector< std::pair< uint32_t, float > > hits_and_fractions() const - Returns list of rechit IDs and fractions for this SimCluster. (Definition: SimCluster.h:184)
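hits_and_fractions() pairs each rechit DetId (as a raw uint32_t) with the fraction of that hit's energy assigned to the SimCluster. A sketch of the usual pattern of folding it with the DetId-to-HGCRecHit map; the helper name is illustrative:

// Sketch: add up the simulated energy of a SimCluster that is visible in the
// rechit map, weighting each matched hit by its assigned fraction.
float simClusterRecoEnergy(const SimCluster& sc,
                           std::unordered_map<DetId, const HGCRecHit*> const& hitMap) {
  float energy = 0.f;
  for (const auto& [rawId, fraction] : sc.hits_and_fractions()) {
    auto it = hitMap.find(DetId(rawId));
    if (it != hitMap.end())
      energy += fraction * it->second->energy();
  }
  return energy;
}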
HGVHistoProducerAlgo::maxDisToMaxperthickperlayer_: double maxDisToMaxperthickperlayer_ (Definition: HGVHistoProducerAlgo.h:388)
HGVHistoProducerAlgo::maxEne_: double maxEne_ (Definition: HGVHistoProducerAlgo.h:349)
HGVHistoProducerAlgo::maxScore_: double maxScore_ (Definition: HGVHistoProducerAlgo.h:371)
HGVHistoProducerAlgo::fill_info_histos: void fill_info_histos(const Histograms &histograms, unsigned int layers) const (Definition: HGVHistoProducerAlgo.cc:1200)
HGVHistoProducerAlgo::bookSimClusterHistos: void bookSimClusterHistos(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers, std::vector< int > thicknesses) (Definition: HGVHistoProducerAlgo.cc:286)
HGVHistoProducerAlgo::nintDisToSeedperthickperlayerenewei_: int nintDisToSeedperthickperlayerenewei_ (Definition: HGVHistoProducerAlgo.h:387)
DetId::Forward (Definition: DetId.h:30)
CaloParticle::simClusters: const SimClusterRefVector & simClusters() const (Definition: CaloParticle.h:72)
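simClusters() exposes the CaloParticle's SimClusters as an edm::RefVector (see the RefVector::size entry below). A small sketch of walking that vector; the helper name is illustrative:

// Sketch: count the simulated hits of all SimClusters attached to a CaloParticle.
size_t countSimHits(const CaloParticle& caloParticle) {
  size_t nHits = 0;
  for (const auto& scRef : caloParticle.simClusters())  // edm::Ref to a SimCluster
    nHits += scRef->hits_and_fractions().size();
  return nHits;
}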
HGVHistoProducerAlgo::minTSTSharedEneFrac_: double minTSTSharedEneFrac_ (Definition: HGVHistoProducerAlgo.h:376)
HGVHistoProducerAlgo::nintX_: int nintX_ (Definition: HGVHistoProducerAlgo.h:411)
HGVHistoProducerAlgo::minTotNClsperthick_: double minTotNClsperthick_ (Definition: HGVHistoProducerAlgo.h:380)
edm::RefVector::size: size_type size() const - Size of the RefVector. (Definition: RefVector.h:102)
HGVHistoProducerAlgo::minTotNClsinTSTs_: double minTotNClsinTSTs_ (Definition: HGVHistoProducerAlgo.h:400)
HGVHistoProducerAlgoHistograms (Definition: HGVHistoProducerAlgo.h:31)
HGVHistoProducerAlgo::bookClusterHistos_CellLevel: void bookClusterHistos_CellLevel(DQMStore::IBooker &ibook, Histograms &histograms, unsigned int layers, std::vector< int > thicknesses) (Definition: HGVHistoProducerAlgo.cc:859)
HGVHistoProducerAlgo::minScore_: double minScore_ (Definition: HGVHistoProducerAlgo.h:371)
HGVHistoProducerAlgo::maxMixedHitsSimCluster_: double maxMixedHitsSimCluster_ (Definition: HGVHistoProducerAlgo.h:355)
HGVHistoProducerAlgo::minDisSeedToMaxperthickperlayer_: double minDisSeedToMaxperthickperlayer_ (Definition: HGVHistoProducerAlgo.h:392)
HGVHistoProducerAlgo::maxTotNClsinTSTs_: double maxTotNClsinTSTs_ (Definition: HGVHistoProducerAlgo.h:400)
HGVHistoProducerAlgo::minDisToSeedperthickperlayerenewei_: double minDisToSeedperthickperlayerenewei_ (Definition: HGVHistoProducerAlgo.h:386)
ticl::Trackster (Definition: Trackster.h:19)
HGVHistoProducerAlgo::detIdInfoInTrackster (Definition: HGVHistoProducerAlgo.h:326)
dqm::implementation::IBooker::book1D: MonitorElement * book1D(TString const &name, TString const &title, int const nchX, double const lowX, double const highX, FUNC onbooking=NOOP()) (Definition: DQMStore.h:98)
HGVHistoProducerAlgo::minMplofLCs_: double minMplofLCs_ (Definition: HGVHistoProducerAlgo.h:404)
Density: hgcal_clustering::Density Density (Definition: HGCalImagingAlgo.h:29)
HGVHistoProducerAlgo::minSizeCLsinTSTs_: double minSizeCLsinTSTs_ (Definition: HGVHistoProducerAlgo.h:406)