Reco.py

#!/usr/bin/env python3
"""
_pp_

Scenario supporting proton collisions

"""
from __future__ import print_function

import os
import sys

from Configuration.DataProcessing.Scenario import *
from Configuration.DataProcessing.Utils import stepALCAPRODUCER, stepSKIMPRODUCER, addMonitoring, dictIO, dqmIOSource, harvestingMode, dqmSeq, gtNameAndConnect
import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.RecoTLR import customisePrompt, customiseExpress

class Reco(Scenario):
    """
    _pp_

    Implement configuration building for data processing for proton
    collision data taking

    """
    def __init__(self):
        Scenario.__init__(self)
        self.recoSeq = ''
        self.cbSc = self.__class__.__name__
        self.promptModifiers = cms.ModifierChain()
        self.expressModifiers = cms.ModifierChain()
        self.visModifiers = cms.ModifierChain()

    def _checkRepackedFlag(self, options, **args):
        # propagate an optional 'repacked' flag into the ConfigBuilder options
        if 'repacked' in args:
            options.isRepacked = (args['repacked'] == True)

    def promptReco(self, globalTag, **args):
        """
        _promptReco_

        Proton collision data taking prompt reco

        """
        step = stepALCAPRODUCER(args['skims'])
        PhysicsSkimStep = ''
        if "PhysicsSkims" in args:
            PhysicsSkimStep = stepSKIMPRODUCER(args['PhysicsSkims'])
        dqmStep = dqmSeq(args, '')
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        if 'nThreads' in args:
            options.nThreads = args['nThreads']

        miniAODStep = ''
        nanoAODStep = ''
        if 'customs' not in args:
            args['customs'] = []

        if 'outputs' in args:
            print(args['outputs'])
            for a in args['outputs']:
                if a['dataTier'] == 'MINIAOD':
                    miniAODStep = ',PAT'
                if a['dataTier'] in ['NANOAOD', 'NANOEDMAOD']:
                    nanoAODStep = ',NANO'
                    args['customs'].append('PhysicsTools/NanoAOD/nano_cff.nanoL1TrigObjCustomize')

        self._checkRepackedFlag(options, **args)

        if 'customs' in args:
            options.customisation_file = args['customs']

        eiStep = ''

        options.step = 'RAW2DIGI,L1Reco,RECO'
        options.step += self.recoSeq + eiStep + step + PhysicsSkimStep
        options.step += miniAODStep + nanoAODStep
        options.step += ',DQM' + dqmStep + ',ENDJOB'

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.promptModifiers))
        cb = ConfigBuilder(options, process=process, with_output=True)

        # Input source
        process.source = cms.Source("PoolSource",
                                    fileNames=cms.untracked.vstring()
                                    )
        cb.prepare()

        addMonitoring(process)

        return process

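    # --- Illustrative usage sketch, not part of the original file ---
    # Rough idea of how a Tier0-style caller might drive promptReco(); the
    # scenario lookup via getScenario() is the standard entry point, while the
    # global tag, skim names and output-definition keys below are placeholders.
    #
    #   from Configuration.DataProcessing.GetScenario import getScenario
    #   scenario = getScenario("pp")              # pp inherits from this Reco class
    #   process = scenario.promptReco(
    #       "GLOBALTAG::PLACEHOLDER",
    #       skims=['SiStripCalZeroBias'],         # required; fed to stepALCAPRODUCER()
    #       outputs=[{'dataTier': 'RECO',
    #                 'eventContent': 'RECO',
    #                 'moduleLabel': 'write_RECO'}],   # consumed through dictIO()
    #       nThreads=8)
    #   with open('promptReco_cfg.py', 'w') as f:
    #       f.write(process.dumpPython())
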
    def expressProcessing(self, globalTag, **args):
        """
        _expressProcessing_

        Proton collision data taking express processing

        """
        skims = args['skims']
        # The AlCaReco skims for PCL should only run during the AlCaSkimming step,
        # which uses the same configuration on the Tier0 side, so drop them here.
        pclWkflws = [x for x in skims if "PromptCalibProd" in x]
        for wfl in pclWkflws:
            skims.remove(wfl)

        step = stepALCAPRODUCER(skims)
        dqmStep = dqmSeq(args, '')
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        if 'nThreads' in args:
            options.nThreads = args['nThreads']

        eiStep = ''

        options.step = 'RAW2DIGI,L1Reco,RECO' + self.recoSeq + eiStep + step + ',DQM' + dqmStep + ',ENDJOB'

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)

        options.filein = 'tobeoverwritten.xyz'
        if 'inputSource' in args:
            options.filetype = args['inputSource']
        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.expressModifiers))

        if 'customs' in args:
            options.customisation_file = args['customs']

        self._checkRepackedFlag(options, **args)

        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)

        cb.prepare()

        addMonitoring(process)

        return process

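    # --- Illustrative usage sketch, not part of the original file ---
    # expressProcessing() is driven the same way as promptReco(); note that any
    # skim whose name contains "PromptCalibProd" is removed before the ALCA step
    # is built. All names and values below are placeholders.
    #
    #   process = scenario.expressProcessing(
    #       "GLOBALTAG::PLACEHOLDER",
    #       skims=['PromptCalibProd', 'SiStripCalZeroBias'],  # the PCL skim is dropped here
    #       outputs=[{'dataTier': 'FEVT',
    #                 'eventContent': 'FEVT',
    #                 'moduleLabel': 'write_FEVT'}],
    #       inputSource='DAT')        # optional; copied into options.filetype
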
    def visualizationProcessing(self, globalTag, **args):
        """
        _visualizationProcessing_

        """

        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        # FIXME: do we need L1Reco here?
        options.step = ''
        if 'preFilter' in args:
            options.step += 'FILTER:' + args['preFilter'] + ','

        eiStep = ''

        if 'beamSplashRun' in args:
            options.step += 'RAW2DIGI,L1Reco,RECO' + args['beamSplashRun'] + ',ENDJOB'
            print("Using RECO%s step in visualizationProcessing" % args['beamSplashRun'])
        else:
            options.step += 'RAW2DIGI,L1Reco,RECO' + eiStep + ',ENDJOB'

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)
        options.timeoutOutput = True
        # FIXME: maybe can go... maybe not
        options.filein = 'tobeoverwritten.xyz'

        if 'inputSource' in args:
            options.filetype = args['inputSource']
        else:
            # this is the default, as this is what is needed on the OnlineCluster
            options.filetype = 'DQMDAQ'

        print("Using %s source" % options.filetype)

        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.visModifiers))

        if 'customs' in args:
            options.customisation_file = args['customs']

        self._checkRepackedFlag(options, **args)

        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)

        cb.prepare()

        # FIXME: not sure about this one... drop for the moment
        # addMonitoring(process)

        return process

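    # --- Illustrative usage sketch, not part of the original file ---
    # visualizationProcessing() falls back to the 'DQMDAQ' input type when no
    # 'inputSource' is given; the preFilter name and the output tier below are
    # placeholders.
    #
    #   process = scenario.visualizationProcessing(
    #       "GLOBALTAG::PLACEHOLDER",
    #       writeTiers=['FEVT'],              # picked up through dictIO()
    #       preFilter='myFilterPath')         # optional; prepends a FILTER: step
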
    def alcaSkim(self, skims, **args):
        """
        _alcaSkim_

        AlcaReco processing & skims for proton collisions

        """

        step = ""
        pclWflws = [x for x in skims if "PromptCalibProd" in x]
        skims = [x for x in skims if x not in pclWflws]

        if len(pclWflws):
            step += 'ALCA:' + ('+'.join(pclWflws))

        if len(skims) > 0:
            if step != "":
                step += ","
            step += "ALCAOUTPUT:" + ('+'.join(skims))

        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        options.step = step
        options.conditions = args['globaltag'] if 'globaltag' in args else 'None'
        if 'globalTagConnect' in args and args['globalTagConnect'] != '':
            options.conditions += ',' + args['globalTagConnect']

        options.triggerResultsProcess = 'RECO'

        if 'customs' in args:
            options.customisation_file = args['customs']

        process = cms.Process('ALCA', self.eras)
        cb = ConfigBuilder(options, process=process)

        # Input source
        process.source = cms.Source(
            "PoolSource",
            fileNames=cms.untracked.vstring()
        )

        cb.prepare()

        # FIXME: dirty hack... any way around this?
        # Tier0 needs the dataset used for the ALCAHARVEST step to be a different data-tier
        for wfl in pclWflws:
            methodToCall = getattr(process, 'ALCARECOStream' + wfl)
            methodToCall.dataset.dataTier = cms.untracked.string('ALCAPROMPT')

        return process

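    # --- Illustrative usage sketch, not part of the original file ---
    # alcaSkim() splits the requested skims into PCL workflows (ALCA:) and plain
    # AlCaReco outputs (ALCAOUTPUT:). Skim names and the tag are placeholders.
    #
    #   process = scenario.alcaSkim(
    #       ['PromptCalibProd', 'TkAlMinBias'],
    #       globaltag="GLOBALTAG::PLACEHOLDER")
    #   # options.step becomes "ALCA:PromptCalibProd,ALCAOUTPUT:TkAlMinBias"
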
    def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
        """
        _dqmHarvesting_

        Proton collisions data taking DQM Harvesting

        """
        options = defaultOptions
        options.scenario = self.cbSc
        options.step = "HARVESTING" + dqmSeq(args, ':dqmHarvesting')
        options.name = "EDMtoMEConvert"
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process("HARVESTING", self.eras)
        process.source = dqmIOSource(args)

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        harvestingMode(process, datasetName, args, rANDl=False)
        return process

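    # --- Illustrative usage sketch, not part of the original file ---
    # dqmHarvesting() builds a HARVESTING job whose input is defined by
    # dqmIOSource(args); dataset name, run number and tag are placeholders.
    #
    #   process = scenario.dqmHarvesting(
    #       "/A/Dataset-Name/DQMIO",          # placeholder dataset name
    #       123456,                           # placeholder run number
    #       "GLOBALTAG::PLACEHOLDER")
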
    def alcaHarvesting(self, globalTag, datasetName, **args):
        """
        _alcaHarvesting_

        Proton collisions data taking AlCa Harvesting

        """
        skims = []
        if 'skims' in args:
            skims = args['skims']

        if 'alcapromptdataset' in args:
            skims.append('@' + args['alcapromptdataset'])

        if len(skims) == 0: return None
        options = defaultOptions
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        options.step = "ALCAHARVEST:" + ('+'.join(skims))
        options.name = "ALCAHARVEST"
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process("ALCAHARVEST", self.eras)
        process.source = cms.Source("PoolSource")

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        #
        # customise process for particular job
        #
        process.source.processingMode = cms.untracked.string('RunsAndLumis')
        process.source.fileNames = cms.untracked(cms.vstring())
        process.maxEvents.input = -1
        process.dqmSaver.workflow = datasetName

        return process

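    # --- Illustrative usage sketch, not part of the original file ---
    # alcaHarvesting() returns None unless at least one skim or an
    # 'alcapromptdataset' is supplied. Names below are placeholders.
    #
    #   process = scenario.alcaHarvesting(
    #       "GLOBALTAG::PLACEHOLDER",
    #       "/A/Dataset-Name/ALCAPROMPT",
    #       alcapromptdataset='PromptCalibProd')
    #   # options.step becomes "ALCAHARVEST:@PromptCalibProd"
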
    def skimming(self, skims, globalTag, **args):
        """
        _skimming_

        Skimming method overload for the prompt skimming

        """
        options = defaultOptions
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        options.step = "SKIM:" + ('+'.join(skims))
        options.name = "SKIM"
        options.conditions = gtNameAndConnect(globalTag, args)
        process = cms.Process("SKIM", self.eras)
        process.source = cms.Source("PoolSource")

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        return process

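    # --- Illustrative usage sketch, not part of the original file ---
    # skimming() wraps a single SKIM: step around the requested skim names;
    # the skim names and tag are placeholders.
    #
    #   process = scenario.skimming(
    #       ['ZMu', 'LogError'],
    #       "GLOBALTAG::PLACEHOLDER")
    #   # options.step becomes "SKIM:ZMu+LogError"
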
    """
    def repack(self, **args):
        options = defaultOptions
        dictIO(options, args)
        options.filein = 'file.dat'
        options.filetype = 'DAT'
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        process = cms.Process('REPACK', self.eras)
        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
        cb.prepare()
        print(cb.pythonCfgCode)
        return process
    """