Scenario.py
#!/usr/bin/env python
"""
_Scenario_

Standard cmsRun Process building interface used for data processing
for a particular data scenario.
A scenario is a macro data-taking setting such as cosmic running,
beam halo running, or a particular validation test.

This class defines the interfaces used by the Tier 0 and Tier 1
processing to wrap calls to ConfigBuilder in order to retrieve all the
configurations for the various types of job.

"""

import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.Merge import mergeProcess
from Configuration.DataProcessing.Repack import repackProcess

# Central import; used by all daughter classes anyway.
from Configuration.Applications.ConfigBuilder import ConfigBuilder, Options, defaultOptions

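# A minimal usage sketch (not part of this module): Tier 0/Tier 1 tools
# typically look a concrete scenario up by name via
# Configuration.DataProcessing.GetScenario rather than importing it
# directly. The scenario name and global tag below are illustrative.
#
#   from Configuration.DataProcessing.GetScenario import getScenario
#   scenario = getScenario("cosmics")
#   process = scenario.promptReco("GLOBALTAG::ALL")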


class Scenario(object):
    """
    _Scenario_

    """
    def __init__(self):
        pass

    def promptReco(self, globalTag, **options):
        """
        _promptReco_

        Given a skeleton process object and references
        to the output modules for the products it produces,
        install the standard reco sequences and event content for this
        scenario.

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for promptReco"
        raise NotImplementedError(msg)
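
    # A sketch of how a concrete scenario might override promptReco by
    # delegating to ConfigBuilder (hedged: modeled on the concrete
    # scenarios in this package; the step list and scenario name are
    # illustrative, not prescriptive):
    #
    #   def promptReco(self, globalTag, **options):
    #       opts = Options()
    #       opts.__dict__.update(defaultOptions.__dict__)
    #       opts.scenario = "pp"
    #       opts.step = "RAW2DIGI,L1Reco,RECO,DQM,ENDJOB"
    #       opts.conditions = globalTag
    #       process = cms.Process("RECO")
    #       cb = ConfigBuilder(opts, process=process, with_output=True)
    #       cb.prepare()
    #       return process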

    def expressProcessing(self, globalTag, **options):
        """
        _expressProcessing_

        Build an express processing configuration for this scenario.

        Express processing runs conversion, reco and alca reco on each
        streamer file in the express stream and writes out RAW, RECO and
        a combined ALCA file that gets mergepacked in a later step.

        writeTiers is a list of tiers to write out, not including ALCA.

        datasets is the list of datasets to split into for each tier
        written out. Should always be one dataset.

        alcaDataset - if set, the combined ALCA file is written out with
        no dataset splitting; it gets assigned straight to the dataset
        provided.

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for expressProcessing"
        raise NotImplementedError(msg)
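
    # Illustrative call, using the keyword arguments documented above
    # (the global tag and dataset name are hypothetical):
    #
    #   process = scenario.expressProcessing(
    #       "GLOBALTAG::ALL",
    #       writeTiers=["RAW", "RECO"],
    #       datasets=["StreamExpress"],
    #   )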


    def alcaSkim(self, skims, **options):
        """
        _alcaSkim_

        Given a skeleton process, install the skim splitting for the
        given skims.

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for alcaSkim"
        raise NotImplementedError(msg)


    def alcaReco(self, *skims, **options):
        """
        _alcaReco_

        Given a skeleton process, install the skim production for the
        given skims.

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for alcaReco"
        raise NotImplementedError(msg)


    def dqmHarvesting(self, datasetName, runNumber, globalTag, **options):
        """
        _dqmHarvesting_

        Build a DQM Harvesting configuration.

        Arguments:

        datasetName - aka workflow name for DQMServer; this is the name
        of the dataset containing the harvested run
        runNumber - the run being harvested
        globalTag - the global tag being used
        inputFiles - the list of LFNs being harvested

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for dqmHarvesting"
        raise NotImplementedError(msg)
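
    # Illustrative call, using the arguments documented above (the
    # dataset name, run number, global tag, and LFN are hypothetical):
    #
    #   process = scenario.dqmHarvesting(
    #       "/Cosmics/Run2010A-v1/RECO",
    #       142000,
    #       "GLOBALTAG::ALL",
    #       inputFiles=["/store/data/.../file.root"],
    #   )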


    def alcaHarvesting(self, globalTag, datasetName, **options):
        """
        _alcaHarvesting_

        Build an AlCa Harvesting configuration.

        Arguments:

        globalTag - the global tag being used
        inputFiles - the list of LFNs being harvested

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for alcaHarvesting"
        raise NotImplementedError(msg)


    def skimming(self, skims, globalTag, **options):
        """
        _skimming_

        Given a process, install the sequences for Tier 1 skimming
        and the appropriate output modules.

        """
        msg = "Scenario Implementation %s\n" % self.__class__.__name__
        msg += "Does not contain an implementation for skimming"
        raise NotImplementedError(msg)


    def merge(self, *inputFiles, **options):
        """
        _merge_

        Builds a merge configuration.

        """
        return mergeProcess(*inputFiles, **options)
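
    # Illustrative call: merge delegates directly to mergeProcess, so
    # keyword options are passed through unchanged (the LFNs and the
    # output_file option shown here are hypothetical):
    #
    #   process = scenario.merge(
    #       "/store/data/run1/file1.root",
    #       "/store/data/run1/file2.root",
    #       output_file="Merged.root",
    #   )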


    def repack(self, **options):
        """
        _repack_

        Builds a repack configuration.

        """
        return repackProcess(**options)


    #
    # helper methods
    #

    def dropOutputModule(self, processRef, moduleName):
        """
        _dropOutputModule_

        Util to prune an unwanted output module.

        """
        del processRef._Process__outputmodules[moduleName]
        return
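
    # Example use of the helper (hedged: "RECOoutput" is an illustrative
    # output module label; the real label depends on the process being
    # pruned):
    #
    #   scenario.dropOutputModule(process, "RECOoutput")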