
Scenario.Scenario Class Reference
Inheritance diagram for Scenario.Scenario (image not included)

Public Member Functions

def __init__ (self)
 
def alcaHarvesting (self, globalTag, datasetName, options)
 
def alcaReco (self, skims, options)
 
def alcaSkim (self, skims, options)
 
def dqmHarvesting (self, datasetName, runNumber, globalTag, options)
 
def dropOutputModule (self, processRef, moduleName)
 
def expressProcessing (self, globalTag, options)
 
def merge (self, inputFiles, options)
 
def promptReco (self, globalTag, options)
 
def repack (self, options)
 
def skimming (self, skims, globalTag, options)
 
def visualizationProcessing (self, globalTag, options)
 

Public Attributes

 eras
 

Detailed Description

_Scenario_

Definition at line 24 of file Scenario.py.
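
Scenario is the abstract base class for the CMSSW data-processing scenarios: concrete subclasses override the hooks documented below, and callers obtain a scenario by name and ask it to build a configuration. A minimal usage sketch, assuming the getScenario helper from Configuration.DataProcessing.GetScenario; the scenario name "pp" and the global tag string are illustrative:

    from Configuration.DataProcessing.GetScenario import getScenario

    # Retrieve a concrete Scenario subclass by name and ask it to
    # build a prompt-reco configuration from a global tag.
    scenario = getScenario("pp")
    process = scenario.promptReco("auto:run2_data")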

Constructor & Destructor Documentation

◆ __init__()

def Scenario.Scenario.__init__ (   self)

Definition at line 29 of file Scenario.py.

29  def __init__(self):
30  self.eras=cms.Modifier()
31 
32 
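Concrete scenarios typically extend this constructor to bind the scenario to a specific era. A minimal sketch, assuming the standard Run2_2018 era; the class name MyDataScenario is hypothetical:

    from Configuration.DataProcessing.Scenario import Scenario
    from Configuration.Eras.Era_Run2_2018_cff import Run2_2018

    class MyDataScenario(Scenario):
        """Hypothetical concrete scenario bound to the Run2_2018 era."""
        def __init__(self):
            Scenario.__init__(self)
            # Replace the default empty Modifier with the era this
            # scenario is meant to process.
            self.eras = Run2_2018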

Member Function Documentation

◆ alcaHarvesting()

def Scenario.Scenario.alcaHarvesting (   self,
  globalTag,
  datasetName,
  options 
)
_alcaHarvesting_

build an AlCa Harvesting configuration

Arguments:

globalTag - The global tag being used
datasetName - The name of the dataset being harvested

Definition at line 139 of file Scenario.py.


139  def alcaHarvesting(self, globalTag, datasetName, **options):
140  """
141  _alcaHarvesting_
142 
143  build an AlCa Harvesting configuration
144 
145  Arguments:
146 
147  globalTag - The global tag being used
148  datasetName - The name of the dataset being harvested
149 
150  """
151  msg = "Scenario Implementation %s\n" % self.__class__.__name__
152  msg += "Does not contain an implementation for alcaHarvesting"
153  raise NotImplementedError(msg)
154 
155 

◆ alcaReco()

def Scenario.Scenario.alcaReco (   self,
  skims,
  options 
)
_alcaReco_

Given a skeleton process, install the skim production for the given skims

Definition at line 107 of file Scenario.py.


107  def alcaReco(self, *skims, **options):
108  """
109  _alcaReco_
110 
111  Given a skeleton process, install the skim production for the given skims
112 
113  """
114  msg = "Scenario Implementation %s\n" % self.__class__.__name__
115  msg += "Does not contain an implementation for alcaReco"
116  raise NotImplementedError(msg)
117 
118 

◆ alcaSkim()

def Scenario.Scenario.alcaSkim (   self,
  skims,
  options 
)
_alcaSkim_

Given a skeleton process, install the skim splitting for the given skims

Definition at line 95 of file Scenario.py.


95  def alcaSkim(self, skims, **options):
96  """
97  _alcaSkim_
98 
99  Given a skeleton process, install the skim splitting for the given skims
100 
101  """
102  msg = "Scenario Implementation %s\n" % self.__class__.__name__
103  msg += "Does not contain an implementation for alcaSkim"
104  raise NotImplementedError(msg)
105 
106 

◆ dqmHarvesting()

def Scenario.Scenario.dqmHarvesting (   self,
  datasetName,
  runNumber,
  globalTag,
  options 
)
_dqmHarvesting_

build a DQM Harvesting configuration

Arguments:

datasetName - the workflow name for the DQM server; this is the name of
the dataset containing the harvested run
runNumber - The run being harvested
globalTag - The global tag being used
inputFiles - The list of LFNs being harvested

Definition at line 119 of file Scenario.py.


119  def dqmHarvesting(self, datasetName, runNumber, globalTag, **options):
120  """
121  _dqmHarvesting_
122 
123  build a DQM Harvesting configuration
124 
125  Arguments:
126 
127  datasetName - the workflow name for the DQM server; this is the name of
128  the dataset containing the harvested run
129  runNumber - The run being harvested
130  globalTag - The global tag being used
131  inputFiles - The list of LFNs being harvested
132 
133  """
134  msg = "Scenario Implementation %s\n" % self.__class__.__name__
135  msg += "Does not contain an implementation for dqmHarvesting"
136  raise NotImplementedError(msg)
137 
138 

◆ dropOutputModule()

def Scenario.Scenario.dropOutputModule (   self,
  processRef,
  moduleName 
)
_dropOutputModule_

Util to prune an unwanted output module

Definition at line 195 of file Scenario.py.

195  def dropOutputModule(self, processRef, moduleName):
196  """
197  _dropOutputModule_
198 
199  Util to prune an unwanted output module
200 
201  """
202  del processRef._Process__outputmodules[moduleName]
203  return
204 
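A usage sketch for dropOutputModule; the process, the module name "outputRAW" and the file name below are illustrative:

    import FWCore.ParameterSet.Config as cms
    from Configuration.DataProcessing.Scenario import Scenario

    process = cms.Process("RECO")
    process.outputRAW = cms.OutputModule("PoolOutputModule",
        fileName = cms.untracked.string("raw.root"))

    # Remove the module from the process' output-module bookkeeping.
    Scenario().dropOutputModule(process, "outputRAW")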

◆ expressProcessing()

def Scenario.Scenario.expressProcessing (   self,
  globalTag,
  options 
)
_expressProcessing_

Build an express processing configuration for this scenario.

Express processing runs conversion, reco and alca reco on each
streamer file in the express stream and writes out RAW, RECO and
a combined ALCA file that gets mergepacked in a later step

writeTiers is the list of tiers to write out, not including ALCA

datasets is the list of datasets to split into for each tier
written out. Should always be one dataset

alcaDataset - if set, this means the combined Alca file is written
out with no dataset splitting; it gets assigned straight to the dataset
provided

Definition at line 48 of file Scenario.py.


48  def expressProcessing(self, globalTag, **options):
49  """
50  _expressProcessing_
51 
52  Build an express processing configuration for this scenario.
53 
54  Express processing runs conversion, reco and alca reco on each
55  streamer file in the express stream and writes out RAW, RECO and
56  a combined ALCA file that gets mergepacked in a later step
57 
58  writeTiers is the list of tiers to write out, not including ALCA
59 
60  datasets is the list of datasets to split into for each tier
61  written out. Should always be one dataset
62 
63  alcaDataset - if set, this means the combined Alca file is written
64  out with no dataset splitting; it gets assigned straight to the dataset
65  provided
66 
67  """
68  msg = "Scenario Implementation %s\n" % self.__class__.__name__
69  msg += "Does not contain an implementation for expressProcessing"
70  raise NotImplementedError(msg)
71 
72 
73 

◆ merge()

def Scenario.Scenario.merge (   self,
  inputFiles,
  options 
)
_merge_

builds a merge configuration

Definition at line 169 of file Scenario.py.

References Merge.mergeProcess().

169  def merge(self, *inputFiles, **options):
170  """
171  _merge_
172 
173  builds a merge configuration
174 
175  """
176  msg = "Scenario Implementation %s\n" % self.__class__.__name__
177  return mergeProcess(*inputFiles, **options)
178 
179 
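Unlike most of the hooks, merge has a working default that delegates to Merge.mergeProcess. A hedged usage sketch; the input LFNs are made up, and the output_file option is an assumption about mergeProcess's keyword interface:

    from Configuration.DataProcessing.Scenario import Scenario

    # Build a merge configuration for two hypothetical input LFNs.
    process = Scenario().merge(
        "/store/data/Run2018A/RAW/file1.root",
        "/store/data/Run2018A/RAW/file2.root",
        output_file = "Merged.root")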

◆ promptReco()

def Scenario.Scenario.promptReco (   self,
  globalTag,
  options 
)
_installPromptReco_

Given a skeleton process object and references
to the output modules for the products it produces,
install the standard reco sequences and event content for this
scenario

Definition at line 33 of file Scenario.py.


33  def promptReco(self, globalTag, **options):
34  """
35  _installPromptReco_
36 
37  given a skeleton process object and references
38  to the output modules for the products it produces,
39  install the standard reco sequences and event content for this
40  scenario
41 
42  """
43  msg = "Scenario Implementation %s\n" % self.__class__.__name__
44  msg += "Does not contain an implementation for promptReco"
45  raise NotImplementedError(msg)
46 
47 
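This stub pattern is shared by all of the abstract hooks: a concrete scenario overrides the method rather than inheriting the NotImplementedError. A minimal, hypothetical override sketch (the real scenarios install full reco sequences and event content via dedicated configuration helpers):

    import FWCore.ParameterSet.Config as cms
    from Configuration.DataProcessing.Scenario import Scenario

    class MyRecoScenario(Scenario):
        def promptReco(self, globalTag, **options):
            # Create the skeleton process, activating this scenario's eras.
            process = cms.Process("RECO", self.eras)
            # The reco sequences, event content and output modules would
            # be installed here; this sketch returns only the bare process.
            return process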

◆ repack()

def Scenario.Scenario.repack (   self,
  options 
)
_repack_

builds a repack configuration

Definition at line 180 of file Scenario.py.

References Repack.repackProcess().

180  def repack(self, **options):
181  """
182  _repack_
183 
184  builds a repack configuration
185 
186  """
187  msg = "Scenario Implementation %s\n" % self.__class__.__name__
188  return repackProcess(**options)
189 
190 

◆ skimming()

def Scenario.Scenario.skimming (   self,
  skims,
  globalTag,
  options 
)
_skimming_

Given a process, install the sequences for Tier 1 skimming
and the appropriate output modules

Definition at line 156 of file Scenario.py.


156  def skimming(self, skims, globalTag, **options):
157  """
158  _skimming_
159 
160  Given a process, install the sequences for Tier 1 skimming
161  and the appropriate output modules
162 
163  """
164  msg = "Scenario Implementation %s\n" % self.__class__.__name__
165  msg += "Does not contain an implementation for skimming"
166  raise NotImplementedError(msg)
167 
168 

◆ visualizationProcessing()

def Scenario.Scenario.visualizationProcessing (   self,
  globalTag,
  options 
)
_visualizationProcessing_

Build a configuration for the visualization processing for this scenario.

Visualization processing runs unpacking and reco on
streamer files. It is equipped to run on the online cluster
and writes RECO or FEVT files.

writeTiers is the list of tiers to write out.

Definition at line 74 of file Scenario.py.


74  def visualizationProcessing(self, globalTag, **options):
75  """
76  _visualizationProcessing_
77 
78  Build a configuration for the visualization processing for this scenario.
79 
80  Visualization processing runs unpacking and reco on
81  streamer files. It is equipped to run on the online cluster
82  and writes RECO or FEVT files.
83 
84  writeTiers is the list of tiers to write out.
85 
86 
87  """
88  msg = "Scenario Implementation %s\n" % self.__class__.__name__
89  msg += "Does not contain an implementation for visualizationProcessing"
90  raise NotImplementedError(msg)
91 
92 
93 
94 

Member Data Documentation

◆ eras

Definition at line 30 of file Scenario.py.