CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
List of all members | Public Member Functions
Scenario.Scenario Class Reference
Inheritance diagram for Scenario.Scenario:

Public Member Functions

def __init__
 
def addExpressOutputModules
 
def alcaHarvesting
 
def alcaReco
 
def alcaSkim
 
def dqmHarvesting
 
def dropOutputModule
 
def expressProcessing
 
def merge
 
def promptReco
 
def skimming
 

Detailed Description

_Scenario_

Definition at line 19 of file Scenario.py.

Constructor & Destructor Documentation

def Scenario.Scenario.__init__ (   self)

Definition at line 24 of file Scenario.py.

24 
25  def __init__(self):
26  pass
27 

Member Function Documentation

def Scenario.Scenario.addExpressOutputModules (   self,
  process,
  tiers,
  datasets 
)
_addExpressOutputModules_

Util method to unpack and install the set of data tier
output modules corresponding to the list of tiers and datasets
provided

Definition at line 168 of file Scenario.py.

169  def addExpressOutputModules(self, process, tiers, datasets):
170  """
171  _addExpressOutputModules_
172 
173  Util method to unpack and install the set of data tier
174  output modules corresponding to the list of tiers and datasets
175  provided
176 
177  """
178  for tier in tiers:
179  for dataset in datasets:
180  moduleName = "write%s%s" % (tier, dataset)
181  contentName = "%sEventContent" % tier
182  contentAttr = getattr(process, contentName)
183  setattr(process, moduleName,
184 
185  cms.OutputModule(
186  "PoolOutputModule",
187  fileName = cms.untracked.string('%s.root' % moduleName),
188  dataset = cms.untracked.PSet(
189  dataTier = cms.untracked.string(tier),
190  ),
191  eventContent = contentAttr
192  )
193 
194  )
195  return
196 
197 
def addExpressOutputModules
Definition: Scenario.py:168
def Scenario.Scenario.alcaHarvesting (   self,
  globalTag,
  datasetName,
  options 
)
_alcaHarvesting_

build an AlCa Harvesting configuration

Arguments:

globalTag - The global tag being used
inputFiles - The list of LFNs being harvested

Definition at line 112 of file Scenario.py.

113  def alcaHarvesting(self, globalTag, datasetName, **options):
114  """
115  _alcaHarvesting_
116 
117  build an AlCa Harvesting configuration
118 
119  Arguments:
120 
121  globalTag - The global tag being used
122  inputFiles - The list of LFNs being harvested
123 
124  """
125  msg = "Scenario Implementation %s\n" % self.__class__.__name__
126  msg += "Does not contain an implementation for alcaHarvesting"
127  raise NotImplementedError, msg
128 
def Scenario.Scenario.alcaReco (   self,
  skims,
  options 
)
_alcaReco_

Given a skeleton process install the skim production for given skims

Definition at line 80 of file Scenario.py.

80 
81  def alcaReco(self, *skims, **options):
82  """
83  _alcaSkim_
84 
85  Given a skeleton process install the skim production for given skims
86 
87  """
88  msg = "Scenario Implementation %s\n" % self.__class__.__name__
89  msg += "Does not contain an implementation for alcaReco"
90  raise NotImplementedError, msg
91 
def Scenario.Scenario.alcaSkim (   self,
  skims,
  options 
)
_alcaSkim_

Given a skeleton process install the skim splitting for given skims

Definition at line 68 of file Scenario.py.

68 
69  def alcaSkim(self, skims, **options):
70  """
71  _alcaSkim_
72 
73  Given a skeleton process install the skim splitting for given skims
74 
75  """
76  msg = "Scenario Implementation %s\n" % self.__class__.__name__
77  msg += "Does not contain an implementation for alcaSkim"
78  raise NotImplementedError, msg
79 
def Scenario.Scenario.dqmHarvesting (   self,
  datasetName,
  runNumber,
  globalTag,
  options 
)
_dqmHarvesting_

build a DQM Harvesting configuration

Arguments:

datasetName - aka workflow name for DQMServer, this is the name of the
dataset containing the harvested run
runNumber - The run being harvested
globalTag - The global tag being used
inputFiles - The list of LFNs being harvested

Definition at line 92 of file Scenario.py.

92 
93  def dqmHarvesting(self, datasetName, runNumber, globalTag, **options):
94  """
95  _dqmHarvesting_
96 
97  build a DQM Harvesting configuration
98 
99  Arguments:
100 
101  datasetName - aka workflow name for DQMServer, this is the name of the
102  dataset containing the harvested run
103  runNumber - The run being harvested
104  globalTag - The global tag being used
105  inputFiles - The list of LFNs being harvested
106 
107  """
108  msg = "Scenario Implementation %s\n" % self.__class__.__name__
109  msg += "Does not contain an implementation for dqmHarvesting"
110  raise NotImplementedError, msg
111 
def Scenario.Scenario.dropOutputModule (   self,
  processRef,
  moduleName 
)
_dropOutputModule_

Util to prune an unwanted output module

Definition at line 157 of file Scenario.py.

Referenced by Impl.preprodmc.preprodmc.alcaReco(), Impl.relvalmcfs.relvalmcfs.alcaReco(), Impl.relvalmc.relvalmc.alcaReco(), and Impl.prodmc.prodmc.alcaReco().

158  def dropOutputModule(self, processRef, moduleName):
159  """
160  _dropOutputModule_
161 
162  Util to prune an unwanted output module
163 
164  """
165  del process._Process__outputmodules[moduleName]
166  return
167 
def Scenario.Scenario.expressProcessing (   self,
  globalTag,
  writeTiers = [],
  options 
)
_expressProcessing_

Build an express processing configuration for this scenario.

Express processing runs conversion, reco and alca reco on each
streamer file in the express stream and writes out RAW, RECO and
a combined ALCA file that gets mergepacked in a later step

writeTiers is list of tiers to write out, not including ALCA

datasets is the list of datasets to split into for each tier
written out. Should always be one dataset

alcaDataset - if set, this means the combined Alca file is written
out with no dataset splitting, it gets assigned straight to the dataset
provided

Definition at line 43 of file Scenario.py.

43 
44  def expressProcessing(self, globalTag, writeTiers = [], **options):
45  """
46  _expressProcessing_
47 
48  Build an express processing configuration for this scenario.
49 
50  Express processing runs conversion, reco and alca reco on each
51  streamer file in the express stream and writes out RAW, RECO and
52  a combined ALCA file that gets mergepacked in a later step
53 
54  writeTiers is list of tiers to write out, not including ALCA
55 
56  datasets is the list of datasets to split into for each tier
57  written out. Should always be one dataset
58 
59  alcaDataset - if set, this means the combined Alca file is written
60  out with no dataset splitting, it gets assigned straight to the datase
61  provided
62 
63  """
64  msg = "Scenario Implementation %s\n" % self.__class__.__name__
65  msg += "Does not contain an implementation for expressProcessing"
66  raise NotImplementedError, msg
67 
def expressProcessing
Definition: Scenario.py:43
def Scenario.Scenario.merge (   self,
  inputFiles,
  options 
)
_merge_

builds a merge configuration

Definition at line 142 of file Scenario.py.

References Merge.mergeProcess().

143  def merge(self, *inputFiles, **options):
144  """
145  _merge_
146 
147  builds a merge configuration
148 
149  """
150  msg = "Scenario Implementation %s\n" % self.__class__.__name__
151  return mergeProcess(*inputFiles, **options)
152 
def mergeProcess
Definition: Merge.py:16
def Scenario.Scenario.promptReco (   self,
  globalTag,
  writeTiers = ['RECO'],
  options 
)
_installPromptReco_

given a skeleton process object and references
to the output modules for the products it produces,
install the standard reco sequences and event content for this
scenario

Definition at line 28 of file Scenario.py.

28 
29  def promptReco(self, globalTag, writeTiers = ['RECO'], **options):
30  """
31  _installPromptReco_
32 
33  given a skeleton process object and references
34  to the output modules for the products it produces,
35  install the standard reco sequences and event content for this
36  scenario
37 
38  """
39  msg = "Scenario Implementation %s\n" % self.__class__.__name__
40  msg += "Does not contain an implementation for promptReco"
41  raise NotImplementedError, msg
42 
def Scenario.Scenario.skimming (   self,
  skims,
  options 
)
_skimming_

Given a process install the sequences for Tier 1 skimming
and the appropriate output modules

Definition at line 129 of file Scenario.py.

130  def skimming(self, skims, **options):
131  """
132  _skimming_
133 
134  Given a process install the sequences for Tier 1 skimming
135  and the appropriate output modules
136 
137  """
138  msg = "Scenario Implementation %s\n" % self.__class__.__name__
139  msg += "Does not contain an implementation for skimming"
140  raise NotImplementedError, msg
141