RootTree.cc
1 #include "RootTree.h"
2 #include "RootDelayedReader.h"
6 #include "InputFile.h"
7 #include "TTree.h"
8 #include "TTreeIndex.h"
9 #include "TTreeCache.h"
10 
11 #include <cassert>
12 #include <iostream>
13 
14 namespace edm {
15  namespace {
16  TBranch* getAuxiliaryBranch(TTree* tree, BranchType const& branchType) {
17  TBranch* branch = tree->GetBranch(BranchTypeToAuxiliaryBranchName(branchType).c_str());
18  if (branch == nullptr) {
19  branch = tree->GetBranch(BranchTypeToAuxBranchName(branchType).c_str());
20  }
21  return branch;
22  }
23  TBranch* getProductProvenanceBranch(TTree* tree, BranchType const& branchType) {
24  TBranch* branch = tree->GetBranch(BranchTypeToBranchEntryInfoBranchName(branchType).c_str());
25  return branch;
26  }
27  }
28  RootTree::RootTree(std::shared_ptr<InputFile> filePtr,
29  BranchType const& branchType,
30  unsigned int nIndexes,
31  unsigned int maxVirtualSize,
32  unsigned int cacheSize,
33  unsigned int learningEntries,
34  bool enablePrefetching,
35  InputType inputType) :
36  filePtr_(filePtr),
37  tree_(dynamic_cast<TTree*>(filePtr_.get() != nullptr ? filePtr_->Get(BranchTypeToProductTreeName(branchType).c_str()) : nullptr)),
38  metaTree_(dynamic_cast<TTree*>(filePtr_.get() != nullptr ? filePtr_->Get(BranchTypeToMetaDataTreeName(branchType).c_str()) : nullptr)),
39  branchType_(branchType),
40  auxBranch_(tree_ ? getAuxiliaryBranch(tree_, branchType_) : nullptr),
41  treeCache_(),
42  rawTreeCache_(),
43  triggerTreeCache_(),
44  rawTriggerTreeCache_(),
45  trainedSet_(),
46  triggerSet_(),
47  entries_(tree_ ? tree_->GetEntries() : 0),
48  entryNumber_(-1),
49  entryNumberForIndex_(new std::vector<EntryNumber>(nIndexes, IndexIntoFile::invalidEntry)),
50  branchNames_(),
51  branches_(new BranchMap),
52  trainNow_(false),
53  switchOverEntry_(-1),
54  rawTriggerSwitchOverEntry_(-1),
55  learningEntries_(learningEntries),
56  cacheSize_(cacheSize),
57  treeAutoFlush_(0),
58  enablePrefetching_(enablePrefetching),
59  //enableTriggerCache_(branchType_ == InEvent),
60  enableTriggerCache_(false), // Disable, for now. Using the trigger cache in the multithreaded environment causes the assert on line 331 to fire occasionally.
61  rootDelayedReader_(new RootDelayedReader(*this, filePtr, inputType)),
62  branchEntryInfoBranch_(metaTree_ ? getProductProvenanceBranch(metaTree_, branchType_) : (tree_ ? getProductProvenanceBranch(tree_, branchType_) : 0)),
63  infoTree_(dynamic_cast<TTree*>(filePtr_.get() != nullptr ? filePtr->Get(BranchTypeToInfoTreeName(branchType).c_str()) : nullptr)) // backward compatibility
64  {
65  assert(tree_);
66  // On merged files in older releases of ROOT, the autoFlush setting is always negative; we must guess.
67  // TODO: On newer merged files, we should be able to get this from the cluster iterator.
68  long treeAutoFlush = (tree_ ? tree_->GetAutoFlush() : 0);
69  if (treeAutoFlush < 0) {
70  // The "+1" is here to avoid divide-by-zero in degenerate cases.
71  Long64_t averageEventSizeBytes = tree_->GetZipBytes() / (tree_->GetEntries()+1) + 1;
72  treeAutoFlush_ = cacheSize_/averageEventSizeBytes+1;
73  } else {
74  treeAutoFlush_ = treeAutoFlush;
75  }
76  if (treeAutoFlush_ < learningEntries_) {
77  learningEntries_ = treeAutoFlush_;
78  }
79  setTreeMaxVirtualSize(maxVirtualSize);
80  setCacheSize(cacheSize);
81  if (tree_) {
82  Int_t branchCount = tree_->GetListOfBranches()->GetEntriesFast();
83  trainedSet_.reserve(branchCount);
84  triggerSet_.reserve(branchCount);
85  }
86  }
87 
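The fallback above estimates how many events fit in one read-cache fill when the file's autoFlush value is unavailable. A minimal standalone sketch of that arithmetic follows; it is not part of RootTree.cc, and the sizes are invented for illustration.

// Illustrative sketch only; the sizes below are made up.
#include <cstdint>
#include <iostream>

int main() {
  std::int64_t zipBytes = 250LL * 1024 * 1024;   // hypothetical compressed size of the tree
  std::int64_t entries = 10000;                  // hypothetical number of entries
  std::int64_t cacheSize = 20LL * 1024 * 1024;   // a 20 MB TTreeCache

  // The "+1" terms avoid divide-by-zero and a zero flush size for tiny trees,
  // mirroring the constructor above.
  std::int64_t averageEventSizeBytes = zipBytes / (entries + 1) + 1;
  std::int64_t treeAutoFlush = cacheSize / averageEventSizeBytes + 1;

  std::cout << "estimated entries per cache fill: " << treeAutoFlush << "\n";
  return 0;
}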
88  RootTree::~RootTree() {
89  }
90 
91  RootTree::EntryNumber const&
92  RootTree::entryNumberForIndex(unsigned int index) const {
93  assert(index < entryNumberForIndex_->size());
94  return (*entryNumberForIndex_)[index];
95  }
96 
97  void
98  RootTree::insertEntryForIndex(unsigned int index) {
99  assert(index < entryNumberForIndex_->size());
100  (*entryNumberForIndex_)[index] = entryNumber();
101  }
102 
103  bool
104  RootTree::isValid() const {
105  if (metaTree_ == nullptr || metaTree_->GetNbranches() == 0) {
106  return tree_ != nullptr && auxBranch_ != nullptr;
107  }
108  if (tree_ != nullptr && auxBranch_ != nullptr && metaTree_ != nullptr) { // backward compatibility
109  if (branchEntryInfoBranch_ != nullptr || infoTree_ != nullptr) return true; // backward compatibility
110  return (entries_ == metaTree_->GetEntries() && tree_->GetNbranches() <= metaTree_->GetNbranches() + 1); // backward compatibility
111  } // backward compatibility
112  return false;
113  }
114 
115  DelayedReader*
116  RootTree::rootDelayedReader() const {
117  rootDelayedReader_->reset();
118  return rootDelayedReader_.get();
119  }
120 
121  void
122  RootTree::setPresence(BranchDescription& prod, std::string const& oldBranchName) {
123  assert(isValid());
124  if(tree_->GetBranch(oldBranchName.c_str()) == nullptr){
125  prod.setDropped(true);
126  }
127  }
128 
129  void
130  RootTree::addBranch(BranchKey const& key,
131  BranchDescription const& prod,
132  std::string const& oldBranchName) {
133  assert(isValid());
134  //use the translated branch name
135  TBranch* branch = tree_->GetBranch(oldBranchName.c_str());
136  roottree::BranchInfo info = roottree::BranchInfo(prod);
137  info.productBranch_ = nullptr;
138  if (prod.present()) {
139  info.productBranch_ = branch;
140  //we want the new branch name for the JobReport
141  branchNames_.push_back(prod.branchName());
142  }
143  TTree* provTree = (metaTree_ != nullptr ? metaTree_ : tree_);
144  info.provenanceBranch_ = provTree->GetBranch(oldBranchName.c_str());
145  branches_->insert(std::make_pair(key, info));
146  }
147 
148  void
149  RootTree::dropBranch(std::string const& oldBranchName) {
150  //use the translated branch name
151  TBranch* branch = tree_->GetBranch(oldBranchName.c_str());
152  if (branch != nullptr) {
153  TObjArray* leaves = tree_->GetListOfLeaves();
154  int entries = leaves->GetEntries();
155  for (int i = 0; i < entries; ++i) {
156  TLeaf* leaf = (TLeaf*)(*leaves)[i];
157  if (leaf == nullptr) continue;
158  TBranch* br = leaf->GetBranch();
159  if (br == nullptr) continue;
160  if (br->GetMother() == branch) {
161  leaves->Remove(leaf);
162  }
163  }
164  leaves->Compress();
165  tree_->GetListOfBranches()->Remove(branch);
166  tree_->GetListOfBranches()->Compress();
167  delete branch;
168  }
169  }
170 
171  roottree::BranchMap const&
172  RootTree::branches() const {return *branches_;}
173 
174  void
175  RootTree::setCacheSize(unsigned int cacheSize) {
176  cacheSize_ = cacheSize;
177  tree_->SetCacheSize(static_cast<Long64_t>(cacheSize));
178  treeCache_.reset(dynamic_cast<TTreeCache*>(filePtr_->GetCacheRead()));
179  if(treeCache_) treeCache_->SetEnablePrefetching(enablePrefetching_);
180  filePtr_->SetCacheRead(0);
181  rawTreeCache_.reset();
182  }
183 
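setCacheSize above relies on a ROOT idiom: TTree::SetCacheSize creates a TTreeCache registered with the underlying TFile, which the caller can then claim through GetCacheRead and detach so ROOT does not delete it a second time. A standalone sketch of that idiom against a plain TFile follows; it is not part of RootTree.cc, and the file and tree names are hypothetical.

// Illustrative sketch only; requires ROOT, and "data.root"/"Events" are made up.
#include <memory>
#include "TFile.h"
#include "TTree.h"
#include "TTreeCache.h"

int main() {
  std::unique_ptr<TFile> file(TFile::Open("data.root"));
  if (!file || file->IsZombie()) return 1;
  TTree* tree = dynamic_cast<TTree*>(file->Get("Events"));
  if (!tree) return 1;

  tree->SetCacheSize(20LL * 1024 * 1024);  // ROOT creates a cache attached to the file
  std::unique_ptr<TTreeCache> cache(dynamic_cast<TTreeCache*>(file->GetCacheRead()));
  file->SetCacheRead(nullptr);             // detach it so the TFile will not delete it too

  // The cache would now be re-attached only around reads, the way RootTree does it.
  return 0;
}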
184  void
185  RootTree::setTreeMaxVirtualSize(int treeMaxVirtualSize) {
186  if (treeMaxVirtualSize >= 0) tree_->SetMaxVirtualSize(static_cast<Long64_t>(treeMaxVirtualSize));
187  }
188 
189  void
190  RootTree::setEntryNumber(EntryNumber theEntryNumber) {
191  filePtr_->SetCacheRead(treeCache_.get());
192 
193  // Detect a backward skip. If the skip is sufficiently large, we roll the dice and reset the treeCache.
194  // This will cause some amount of over-reading: we pre-fetch all the events in some prior cluster.
195  // However, because reading one event in the cluster is supposed to be equivalent to reading all events in the cluster,
196  // we're not incurring additional over-reading - we're just doing it more efficiently.
197  // NOTE: Constructor guarantees treeAutoFlush_ is positive, even if TTree->GetAutoFlush() is negative.
198  if ((theEntryNumber < static_cast<EntryNumber>(entryNumber_-treeAutoFlush_)) &&
199  (treeCache_) && (!treeCache_->IsLearning()) && (entries_ > 0) && (switchOverEntry_ >= 0)) {
200  treeCache_->SetEntryRange(theEntryNumber, entries_);
201  treeCache_->FillBuffer();
202  }
203 
204  entryNumber_ = theEntryNumber;
205  tree_->LoadTree(entryNumber_);
206  filePtr_->SetCacheRead(0);
207  if(treeCache_ && trainNow_ && entryNumber_ >= 0) {
208  startTraining();
209  trainNow_ = false;
210  trainedSet_.clear();
211  triggerSet_.clear();
212  rawTriggerSwitchOverEntry_ = -1;
213  }
214  if (treeCache_ && treeCache_->IsLearning() && switchOverEntry_ >= 0 && entryNumber_ >= switchOverEntry_) {
215  stopTraining();
216  }
217  }
218 
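The backward-skip test in setEntryNumber compares the requested entry against the current one minus a full flush cluster before deciding to re-prime the cache. A small sketch of just that comparison follows; it is not part of RootTree.cc, and the entry values are invented.

// Illustrative sketch only; entry values and cluster size are invented.
#include <cassert>
#include <cstdint>

using EntryNumber = std::int64_t;

// True when the requested entry is more than one flush cluster behind the
// current entry, i.e. when it pays off to refill the cache from the new position.
bool isLargeBackwardSkip(EntryNumber requested, EntryNumber current, EntryNumber treeAutoFlush) {
  return requested < current - treeAutoFlush;
}

int main() {
  assert(isLargeBackwardSkip(250, 500, 100));   // jump back 250 entries: refill
  assert(!isLargeBackwardSkip(450, 500, 100));  // small step back: keep the cache
  return 0;
}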
219  // The actual implementation is done below; it's split in this strange
220  // manner in order to keep a by-definition-rare code path out of the instruction cache.
221  inline TTreeCache*
222  RootTree::checkTriggerCache(TBranch* branch, EntryNumber entryNumber) const {
223  if (!treeCache_->IsAsyncReading() && enableTriggerCache_ && (trainedSet_.find(branch) == trainedSet_.end())) {
224  return checkTriggerCacheImpl(branch, entryNumber);
225  } else {
226  return NULL;
227  }
228  }
229 
230  // See comments in the header. If this function is called, we already know
231  // the trigger cache is active and it was a cache miss for the regular cache.
232  TTreeCache*
233  RootTree::checkTriggerCacheImpl(TBranch* branch, EntryNumber entryNumber) const {
234  // This branch is not going to be in the cache.
235  // Assume this is a "trigger pattern".
236  // Always make sure the branch is added to the trigger set.
237  if (triggerSet_.find(branch) == triggerSet_.end()) {
238  triggerSet_.insert(branch);
239  if (triggerTreeCache_.get()) { triggerTreeCache_->AddBranch(branch, kTRUE); }
240  }
241 
242  if (rawTriggerSwitchOverEntry_ < 0) {
243  // The trigger has never fired before. Take everything not in the
244  // trainedSet and load it from disk
245 
246  // Calculate the end of the next cluster; triggers in the next cluster
247  // will use the triggerCache, not the rawTriggerCache.
248  TTree::TClusterIterator clusterIter = tree_->GetClusterIterator(entryNumber);
249  while (rawTriggerSwitchOverEntry_ < entryNumber) {
250  rawTriggerSwitchOverEntry_ = clusterIter();
251  }
252 
253  // ROOT will automatically expand the cache to fit one cluster; hence, we use
254  // 5 MB as the cache size below
255  tree_->SetCacheSize(static_cast<Long64_t>(5*1024*1024));
256  rawTriggerTreeCache_.reset(dynamic_cast<TTreeCache*>(filePtr_->GetCacheRead()));
257  if(rawTriggerTreeCache_) rawTriggerTreeCache_->SetEnablePrefetching(false);
258  TObjArray *branches = tree_->GetListOfBranches();
259  int branchCount = branches->GetEntriesFast();
260 
261  // Train the rawTriggerCache to have everything not in the regular cache.
262  rawTriggerTreeCache_->SetLearnEntries(0);
263  rawTriggerTreeCache_->SetEntryRange(entryNumber, rawTriggerSwitchOverEntry_);
264  for (int i=0;i<branchCount;i++) {
265  TBranch *tmp_branch = (TBranch*)branches->UncheckedAt(i);
266  if (trainedSet_.find(tmp_branch) != trainedSet_.end()) {
267  continue;
268  }
269  rawTriggerTreeCache_->AddBranch(tmp_branch, kTRUE);
270  }
271  performedSwitchOver_ = false;
272  rawTriggerTreeCache_->StopLearningPhase();
273  filePtr_->SetCacheRead(0);
274 
275  return rawTriggerTreeCache_.get();
276  } else if (entryNumber < rawTriggerSwitchOverEntry_) {
277  // The raw trigger has fired and its contents are valid.
278  return rawTriggerTreeCache_.get();
279  } else if (rawTriggerSwitchOverEntry_ > 0) {
280  // The raw trigger has fired, but we are out of the cache. Use the
281  // triggerCache instead.
282  if (!performedSwitchOver_) {
283  rawTriggerTreeCache_.reset();
284  performedSwitchOver_ = true;
285 
286  // Train the triggerCache
287  tree_->SetCacheSize(static_cast<Long64_t>(5*1024*1024));
288  triggerTreeCache_.reset(dynamic_cast<TTreeCache*>(filePtr_->GetCacheRead()));
289  triggerTreeCache_->SetEnablePrefetching(false);
290  triggerTreeCache_->SetLearnEntries(0);
291  triggerTreeCache_->SetEntryRange(entryNumber, tree_->GetEntries());
292  for(std::unordered_set<TBranch*>::const_iterator it = triggerSet_.begin(), itEnd = triggerSet_.end();
293  it != itEnd;
294  it++)
295  {
296  triggerTreeCache_->AddBranch(*it, kTRUE);
297  }
298  triggerTreeCache_->StopLearningPhase();
299  filePtr_->SetCacheRead(0);
300  }
301  return triggerTreeCache_.get();
302  } else if (entryNumber < rawTriggerSwitchOverEntry_) {
303  // The raw trigger has fired and its contents are valid.
304  return rawTriggerTreeCache_.get();
305  } else if (rawTriggerSwitchOverEntry_ > 0) {
306  // The raw trigger has fired, but we are out of the cache. Use the
307  // triggerCache instead.
308  if (!performedSwitchOver_) {
309  rawTriggerTreeCache_.reset();
310  performedSwitchOver_ = true;
311 
312  // Train the triggerCache
313  tree_->SetCacheSize(static_cast<Long64_t>(5*1024*1024));
314  triggerTreeCache_.reset(dynamic_cast<TTreeCache*>(filePtr_->GetCacheRead()));
315  triggerTreeCache_->SetEnablePrefetching(false);
316  triggerTreeCache_->SetLearnEntries(0);
317  triggerTreeCache_->SetEntryRange(entryNumber, tree_->GetEntries());
318  for(std::unordered_set<TBranch*>::const_iterator it = triggerSet_.begin(), itEnd = triggerSet_.end();
319  it != itEnd;
320  it++)
321  {
322  triggerTreeCache_->AddBranch(*it, kTRUE);
323  }
324  triggerTreeCache_->StopLearningPhase();
325  filePtr_->SetCacheRead(0);
326  }
327  return triggerTreeCache_.get();
328  }
329 
330  // By construction, this case should be impossible.
331  assert (false);
332  return NULL;
333  }
334 
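checkTriggerCacheImpl above splits the tree's branches into those the regular cache was trained on and the rest, which are handed to the raw trigger cache. A sketch of that partition with plain strings instead of TBranch pointers follows; it is not part of RootTree.cc, and the branch names are invented.

// Illustrative sketch only; branch names are made up.
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

int main() {
  std::vector<std::string> allBranches = {"EventAuxiliary", "recoTracks", "edmTriggerResults"};
  std::unordered_set<std::string> trainedSet = {"EventAuxiliary", "recoTracks"};

  // Everything the regular cache was not trained on goes to the raw trigger cache.
  std::unordered_set<std::string> rawTriggerBranches;
  for (std::string const& name : allBranches) {
    if (trainedSet.find(name) == trainedSet.end()) {
      rawTriggerBranches.insert(name);
    }
  }

  for (std::string const& name : rawTriggerBranches) {
    std::cout << "raw trigger cache would prefetch: " << name << "\n";
  }
  return 0;
}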
335  inline TTreeCache*
336  RootTree::selectCache(TBranch* branch, EntryNumber entryNumber) const {
337  TTreeCache *triggerCache = NULL;
338  if (!treeCache_) {
339  return NULL;
340  } else if (treeCache_->IsLearning() && rawTreeCache_) {
341  treeCache_->AddBranch(branch, kTRUE);
342  trainedSet_.insert(branch);
343  return rawTreeCache_.get();
344  } else if ((triggerCache = checkTriggerCache(branch, entryNumber))) {
345  // A NULL return value from checkTriggerCache indicates the trigger cache case
346  // does not apply, and we should continue below.
347  return triggerCache;
348  } else {
349  // The "normal" TTreeCache case.
350  return treeCache_.get();
351  }
352  }
353 
354  void
355  RootTree::getEntry(TBranch* branch, EntryNumber entryNumber) const {
356  try {
357  TTreeCache * cache = selectCache(branch, entryNumber);
358  filePtr_->SetCacheRead(cache);
359  branch->GetEntry(entryNumber);
360  filePtr_->SetCacheRead(0);
361  } catch(cms::Exception const& e) {
362  // We make sure the treeCache_ is detached from the file,
363  // so that ROOT does not also delete it.
364  filePtr_->SetCacheRead(0);
365  Exception t(errors::FileReadError, "", e);
366  t.addContext(std::string("Reading branch ")+branch->GetName());
367  throw t;
368  }
369  }
370 
371  bool
372  RootTree::skipEntries(unsigned int& offset) {
373  entryNumber_ += offset;
374  bool retval = (entryNumber_ < entries_);
375  if(retval) {
376  offset = 0;
377  } else {
378  // Not enough entries in the file to skip.
379  // The +1 is needed because entryNumber_ is -1 at the initialization of the tree, not 0.
380  long long overshoot = entryNumber_ + 1 - entries_;
381  entryNumber_ = entries_ - 1;
382  offset = overshoot;
383  }
384  return retval;
385  }
386 
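A worked example of the overshoot bookkeeping in skipEntries, with made-up numbers (not part of RootTree.cc): a freshly opened tree sits at entry -1, so skipping 150 entries in a 100-entry file consumes the whole file and leaves 50 still to skip in the next one.

// Illustrative sketch only; entry counts are invented.
#include <cassert>

int main() {
  long long entries = 100;      // entries in this file
  long long entryNumber = -1;   // value right after the tree is opened
  unsigned int offset = 150;    // entries the caller wants to skip

  entryNumber += offset;                              // 149: past the end of this file
  bool enoughEntries = (entryNumber < entries);
  if (!enoughEntries) {
    long long overshoot = entryNumber + 1 - entries;  // 50 entries left over
    offset = static_cast<unsigned int>(overshoot);
  }
  assert(!enoughEntries);
  assert(offset == 50);
  return 0;
}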
387  void
388  RootTree::startTraining() {
389  if (cacheSize_ == 0) {
390  return;
391  }
392  assert(treeCache_);
393  assert(branchType_ == InEvent);
394  assert(!rawTreeCache_);
395  treeCache_->SetLearnEntries(learningEntries_);
396  tree_->SetCacheSize(static_cast<Long64_t>(cacheSize_));
397  rawTreeCache_.reset(dynamic_cast<TTreeCache *>(filePtr_->GetCacheRead()));
398  rawTreeCache_->SetEnablePrefetching(false);
399  filePtr_->SetCacheRead(0);
400  rawTreeCache_->SetLearnEntries(0);
401  switchOverEntry_ = entryNumber_ + learningEntries_;
402  rawTreeCache_->StartLearningPhase();
403  rawTreeCache_->SetEntryRange(entryNumber_, switchOverEntry_);
404  rawTreeCache_->AddBranch("*", kTRUE);
405  rawTreeCache_->StopLearningPhase();
406  treeCache_->StartLearningPhase();
407  treeCache_->SetEntryRange(switchOverEntry_, tree_->GetEntries());
408  treeCache_->AddBranch(poolNames::branchListIndexesBranchName().c_str(), kTRUE);
409  treeCache_->AddBranch(BranchTypeToAuxiliaryBranchName(branchType_).c_str(), kTRUE);
410  trainedSet_.clear();
411  triggerSet_.clear();
412  assert(treeCache_->GetTree() == tree_);
413  }
414 
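startTraining above primes two caches: a raw cache that prefetches every branch for the first learningEntries_ events, and the real cache that learns which branches are actually read and takes over at switchOverEntry_. A sketch of that switch-over bookkeeping reduced to plain integers follows; it is not part of RootTree.cc, and the numbers are invented.

// Illustrative sketch only; entry numbers are made up.
#include <cassert>
#include <cstdint>

using EntryNumber = std::int64_t;

struct TrainingWindow {
  EntryNumber switchOverEntry;
  // Before the switch-over entry the fully populated raw cache serves reads;
  // from then on the cache trained on the observed branches is used instead.
  bool useRawCache(EntryNumber entry) const { return entry < switchOverEntry; }
};

int main() {
  EntryNumber currentEntry = 40;
  EntryNumber learningEntries = 20;
  TrainingWindow window{currentEntry + learningEntries};  // switch over at entry 60

  assert(window.useRawCache(59));
  assert(!window.useRawCache(60));
  return 0;
}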
415  void
416  RootTree::stopTraining() {
417  filePtr_->SetCacheRead(treeCache_.get());
418  treeCache_->StopLearningPhase();
419  filePtr_->SetCacheRead(0);
420  rawTreeCache_.reset();
421  }
422 
423  void
424  RootTree::close() {
425  // The TFile is about to be closed, and destructed.
426  // Just to play it safe, zero all pointers to quantities that are owned by the TFile.
427  auxBranch_ = branchEntryInfoBranch_ = nullptr;
428  tree_ = metaTree_ = infoTree_ = nullptr;
429  // We own the treeCache_.
430  // We make sure the treeCache_ is detached from the file,
431  // so that ROOT does not also delete it.
432  filePtr_->SetCacheRead(0);
433  // We *must* delete the TTreeCache here because the TFilePrefetch object
434  // references the TFile. If TFile is closed, before the TTreeCache is
435  // deleted, the TFilePrefetch may continue to do TFile operations, causing
436  // deadlocks or exceptions.
437  treeCache_.reset();
438  rawTreeCache_.reset();
439  triggerTreeCache_.reset();
440  rawTriggerTreeCache_.reset();
441  // We give up our shared ownership of the TFile itself.
442  filePtr_.reset();
443  }
444 
445  void
446  RootTree::trainCache(char const* branchNames) {
447  if (cacheSize_ == 0) {
448  return;
449  }
450  tree_->LoadTree(0);
451  assert(treeCache_);
452  filePtr_->SetCacheRead(treeCache_.get());
453  treeCache_->StartLearningPhase();
454  treeCache_->SetEntryRange(0, tree_->GetEntries());
455  treeCache_->AddBranch(branchNames, kTRUE);
456  treeCache_->StopLearningPhase();
457  assert(treeCache_->GetTree() == tree_);
458  // We own the treeCache_.
459  // We make sure the treeCache_ is detached from the file,
460  // so that ROOT does not also delete it.
461  filePtr_->SetCacheRead(0);
462 
463  // Must also manually add things to the trained set.
464  TObjArray *branches = tree_->GetListOfBranches();
465  int branchCount = branches->GetEntriesFast();
466  for (int i=0;i<branchCount;i++) {
467  TBranch *branch = (TBranch*)branches->UncheckedAt(i);
468  if ((branchNames[0] == '*') || (strcmp(branchNames, branch->GetName()) == 0)) {
469  trainedSet_.insert(branch);
470  }
471  }
472 
473  }
474 
475  namespace roottree {
476  Int_t
477  getEntry(TBranch* branch, EntryNumber entryNumber) {
478  Int_t n = 0;
479  try {
480  n = branch->GetEntry(entryNumber);
481  }
482  catch(cms::Exception const& e) {
483  throw Exception(errors::FileReadError, "", e);
484  }
485  return n;
486  }
487 
488  Int_t
489  getEntry(TTree* tree, EntryNumber entryNumber) {
490  Int_t n = 0;
491  try {
492  n = tree->GetEntry(entryNumber);
493  }
494  catch(cms::Exception const& e) {
495  throw Exception (errors::FileReadError, "", e);
496  }
497  return n;
498  }
499 
500  std::unique_ptr<TTreeCache>
501  trainCache(TTree* tree, InputFile& file, unsigned int cacheSize, char const* branchNames) {
502  tree->LoadTree(0);
503  tree->SetCacheSize(cacheSize);
504  std::unique_ptr<TTreeCache> treeCache(dynamic_cast<TTreeCache*>(file.GetCacheRead()));
505  if (nullptr != treeCache.get()) {
506  treeCache->StartLearningPhase();
507  treeCache->SetEntryRange(0, tree->GetEntries());
508  treeCache->AddBranch(branchNames, kTRUE);
509  treeCache->StopLearningPhase();
510  }
511  // We own the treeCache_.
512  // We make sure the treeCache_ is detached from the file,
513  // so that ROOT does not also delete it.
514  file.SetCacheRead(0);
515  return treeCache;
516  }
517  }
518 }