Reco.py
#!/usr/bin/env python3
"""
_pp_

Scenario supporting proton collisions

"""
from __future__ import print_function

import os
import sys

# Provides the Scenario base class plus Options, defaultOptions and ConfigBuilder used below.
from Configuration.DataProcessing.Scenario import *
from Configuration.DataProcessing.Utils import stepALCAPRODUCER, stepSKIMPRODUCER, addMonitoring, dictIO, dqmIOSource, harvestingMode, dqmSeq, nanoFlavours, gtNameAndConnect
import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.RecoTLR import customisePrompt, customiseExpress

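# Reco is the generic proton-proton ("pp") data-processing scenario; the
# era-specific scenarios in Configuration.DataProcessing.Impl (for example
# ppEra_Run3) derive from it and are looked up by name by the Tier-0 tooling.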
class Reco(Scenario):
    """
    _pp_

    Implement configuration building for data processing for proton
    collision data taking

    """
    def __init__(self):
        Scenario.__init__(self)
        self.recoSeq = ''
        self.cbSc = self.__class__.__name__
        self.promptModifiers = cms.ModifierChain()
        self.expressModifiers = cms.ModifierChain()
        self.visModifiers = cms.ModifierChain()


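    # Helper used by the processing methods below: propagate the optional
    # 'repacked' flag from the caller's arguments onto the ConfigBuilder options.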
    def _checkRepackedFlag(self, options, **args):
        if 'repacked' in args:
            if args['repacked'] == True:
                options.isRepacked = True
            else:
                options.isRepacked = False


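    # promptReco builds the full Tier-0 prompt-reconstruction configuration.
    # The keyword arguments (e.g. 'skims', 'PhysicsSkims', 'outputs', 'nThreads',
    # 'customs') are supplied by the caller; 'skims' is required.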
    def promptReco(self, globalTag, **args):
        """
        _promptReco_

        Proton collision data taking prompt reco

        """
        step = stepALCAPRODUCER(args['skims'])
        PhysicsSkimStep = ''
        if "PhysicsSkims" in args:
            PhysicsSkimStep = stepSKIMPRODUCER(args['PhysicsSkims'])
        dqmStep = dqmSeq(args, '')
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        if 'nThreads' in args:
            options.nThreads = args['nThreads']

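        # Append PAT (MiniAOD) and NANO steps only when the corresponding
        # MINIAOD / NANOAOD output data tiers are requested in 'outputs'.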
        miniAODStep = ''
        nanoAODStep = ''
        if 'customs' not in args:
            args['customs'] = []

        if 'outputs' in args:
            print(args['outputs'])
            for a in args['outputs']:
                if a['dataTier'] == 'MINIAOD':
                    miniAODStep = ',PAT'
                if a['dataTier'] in ['NANOAOD', 'NANOEDMAOD']:
                    if "nanoFlavours" in args:
                        nanoAODStep = ',NANO' + nanoFlavours(args['nanoFlavours'])
                    else:
                        nanoAODStep = ',NANO:@PHYS+@L1'

        self._checkRepackedFlag(options, **args)

        if 'customs' in args:
            options.customisation_file = args['customs']

        eiStep = ''

        options.step = 'RAW2DIGI,L1Reco,RECO'
        options.step += self.recoSeq + eiStep + step + PhysicsSkimStep
        options.step += miniAODStep + nanoAODStep
        options.step += ',DQM' + dqmStep + ',ENDJOB'
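        # options.step is now a comma-separated cmsDriver-style step list,
        # e.g. (illustrative): 'RAW2DIGI,L1Reco,RECO,ALCAPRODUCER:TkAlMinBias,PAT,NANO:@PHYS+@L1,DQM,ENDJOB'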

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.promptModifiers))
        cb = ConfigBuilder(options, process=process, with_output=True)

        # Input source
        process.source = cms.Source("PoolSource",
                                    fileNames = cms.untracked.vstring()
        )
        cb.prepare()

        addMonitoring(process)

        return process


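    # expressProcessing builds the express-stream reconstruction. It runs the
    # same base RECO sequence as prompt reco, but the PromptCalibProd (PCL)
    # AlCaReco skims are deferred to the dedicated AlCaSkimming step, and the
    # input file type can be overridden via 'inputSource'.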
    def expressProcessing(self, globalTag, **args):
        """
        _expressProcessing_

        Proton collision data taking express processing

        """
        skims = args['skims']
        # The AlCaReco skims for the PCL should only run in the AlCaSkimming step,
        # which uses the same configuration on the Tier-0 side; drop them here.
        pclWkflws = [x for x in skims if "PromptCalibProd" in x]
        for wfl in pclWkflws:
            skims.remove(wfl)

        step = stepALCAPRODUCER(skims)
        dqmStep = dqmSeq(args, '')
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        if 'nThreads' in args:
            options.nThreads = args['nThreads']

        eiStep = ''

        options.step = 'RAW2DIGI,L1Reco,RECO' + self.recoSeq + eiStep + step + ',DQM' + dqmStep + ',ENDJOB'

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)

        options.filein = 'tobeoverwritten.xyz'
        if 'inputSource' in args:
            options.filetype = args['inputSource']
        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.expressModifiers))

        if 'customs' in args:
            options.customisation_file = args['customs']

        self._checkRepackedFlag(options, **args)

        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)

        cb.prepare()

        addMonitoring(process)

        return process


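    # visualizationProcessing builds the configuration used for online event
    # visualization: output writing uses a timeout (timeoutOutput) and, unless
    # 'inputSource' is given, the DQMDAQ streamer source used on the online
    # cluster is read.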
    def visualizationProcessing(self, globalTag, **args):
        """
        _visualizationProcessing_

        """

        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        # FIXME: do we need L1Reco here?
        options.step = ''
        if 'preFilter' in args:
            options.step += 'FILTER:' + args['preFilter'] + ','

        eiStep = ''

        if 'beamSplashRun' in args:
            options.step += 'RAW2DIGI,L1Reco,RECO' + args['beamSplashRun'] + ',ENDJOB'
            print("Using RECO%s step in visualizationProcessing" % args['beamSplashRun'])
        else:
            options.step += 'RAW2DIGI,L1Reco,RECO' + eiStep + ',ENDJOB'

        dictIO(options, args)
        options.conditions = gtNameAndConnect(globalTag, args)
        options.timeoutOutput = True
        # FIXME: maybe can go...maybe not
        options.filein = 'tobeoverwritten.xyz'

        if 'inputSource' in args:
            options.filetype = args['inputSource']
        else:
            # this is the default, as this is what is needed on the online cluster
            options.filetype = 'DQMDAQ'

        print("Using %s source" % options.filetype)

        process = cms.Process('RECO', cms.ModifierChain(self.eras, self.visModifiers))

        if 'customs' in args:
            options.customisation_file = args['customs']

        self._checkRepackedFlag(options, **args)

        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)

        cb.prepare()

        # FIXME: not sure about this one...drop for the moment
        # addMonitoring(process)

        return process



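    # alcaSkim splits the requested skims into PromptCalibProd* (PCL) workflows,
    # which run in the ALCA step and have their output data tier forced to
    # ALCAPROMPT so that the ALCAHARVEST step can consume it, and regular
    # AlCaReco skims, which are written out via ALCAOUTPUT.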
    def alcaSkim(self, skims, **args):
        """
        _alcaSkim_

        AlcaReco processing & skims for proton collisions

        """

        step = ""
        pclWflws = [x for x in skims if "PromptCalibProd" in x]
        skims = [x for x in skims if x not in pclWflws]

        if len(pclWflws):
            step += 'ALCA:' + ('+'.join(pclWflws))

        if len(skims) > 0:
            if step != "":
                step += ","
            step += "ALCAOUTPUT:" + ('+'.join(skims))

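        # Illustrative example of a resulting step string (skim names are
        # hypothetical): 'ALCA:PromptCalibProd,ALCAOUTPUT:TkAlMinBias+SiStripCalMinBias'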
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        options.step = step
        options.conditions = args['globaltag'] if 'globaltag' in args else 'None'
        if 'globalTagConnect' in args and args['globalTagConnect'] != '':
            options.conditions += ',' + args['globalTagConnect']

        options.triggerResultsProcess = 'RECO'

        if 'customs' in args:
            options.customisation_file = args['customs']

        process = cms.Process('ALCA', self.eras)
        cb = ConfigBuilder(options, process=process)

        # Input source
        process.source = cms.Source(
            "PoolSource",
            fileNames = cms.untracked.vstring()
        )

        cb.prepare()

        # FIXME: dirty hack...any way around this?
        # Tier-0 needs the dataset used for the ALCAHARVEST step to be a different data tier
        for wfl in pclWflws:
            methodToCall = getattr(process, 'ALCARECOStream' + wfl)
            methodToCall.dataset.dataTier = cms.untracked.string('ALCAPROMPT')

        return process


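    # dqmHarvesting builds the DQM harvesting job for a given dataset and run:
    # it reads DQMIO input via dqmIOSource and runs the HARVESTING step, with
    # harvestingMode configuring the DQM saver output.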
    def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
        """
        _dqmHarvesting_

        Proton collisions data taking DQM Harvesting

        """
        options = defaultOptions
        options.scenario = self.cbSc
        options.step = "HARVESTING" + dqmSeq(args, ':dqmHarvesting')
        options.name = "EDMtoMEConvert"
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process("HARVESTING", self.eras)
        process.source = dqmIOSource(args)

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        harvestingMode(process, datasetName, args, rANDl=False)
        return process


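    # alcaHarvesting builds the PCL harvesting (ALCAHARVEST) job: it runs over
    # all runs and lumis of the input ALCAPROMPT dataset and registers the
    # result under the given dataset name via the DQM saver workflow.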
    def alcaHarvesting(self, globalTag, datasetName, **args):
        """
        _alcaHarvesting_

        Proton collisions data taking AlCa Harvesting

        """
        skims = []
        if 'skims' in args:
            skims = args['skims']

        if 'alcapromptdataset' in args:
            skims.append('@' + args['alcapromptdataset'])

        if len(skims) == 0:
            return None
        options = defaultOptions
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        options.step = "ALCAHARVEST:" + ('+'.join(skims))
        options.name = "ALCAHARVEST"
        options.conditions = gtNameAndConnect(globalTag, args)

        process = cms.Process("ALCAHARVEST", self.eras)
        process.source = cms.Source("PoolSource")

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        #
        # customise process for this particular job
        #
        process.source.processingMode = cms.untracked.string('RunsAndLumis')
        process.source.fileNames = cms.untracked(cms.vstring())
        process.maxEvents.input = -1
        process.dqmSaver.workflow = datasetName

        return process

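    # skimming builds the configuration for prompt skimming: a single SKIM step
    # over the requested skim names, read from a PoolSource.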
    def skimming(self, skims, globalTag, **args):
        """
        _skimming_

        Skimming method overload for the prompt skimming

        """
        options = defaultOptions
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        options.step = "SKIM:" + ('+'.join(skims))
        options.name = "SKIM"
        options.conditions = gtNameAndConnect(globalTag, args)
        process = cms.Process("SKIM", self.eras)
        process.source = cms.Source("PoolSource")

        if 'customs' in args:
            options.customisation_file = args['customs']

        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        return process

    """
    def repack(self, **args):
        options = defaultOptions
        dictIO(options, args)
        options.filein = 'file.dat'
        options.filetype = 'DAT'
        options.scenario = self.cbSc if hasattr(self, 'cbSc') else self.__class__.__name__
        process = cms.Process('REPACK', self.eras)
        cb = ConfigBuilder(options, process=process, with_output=True, with_input=True)
        cb.prepare()
        print(cb.pythonCfgCode)
        return process
    """
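
# Illustrative usage sketch (comments only, not executed): Tier-0-style callers
# typically obtain a concrete scenario by name and invoke one of the methods
# above. The scenario name, global tag and output definition below are
# placeholders, and the 'outputs' dictionary keys are assumptions.
#
#   from Configuration.DataProcessing.GetScenario import getScenario
#   scenario = getScenario("ppEra_Run3")
#   process = scenario.promptReco(
#       "SOME_GLOBAL_TAG",
#       skims=[],
#       outputs=[{'dataTier': 'AOD', 'moduleLabel': 'write_AOD'}])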