# ConfigBuilder.py -- builds cmsDriver process configurations (CMSSW,
# Configuration/Applications). Doxygen navigation text removed.
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
class Options:
    # Plain attribute bag for cmsDriver options: attributes are assigned
    # externally (see the canonical defaults on `defaultOptions` below).
    pass
20 
# the canonical defaults
defaultOptions = Options()
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
# -- pile-up mixing ---------------------------------------------------------
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# -- geometry / field / conditions -----------------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# -1 means "process all events"
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# -- input ------------------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# -- customisation hooks ----------------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# -- output definition ------------------------------------------------------
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
# (sic) historical spelling of "inlineObjects"; referenced elsewhere, keep
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# -- run-dependent MC -------------------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# kept as a string; consumers convert as needed
defaultOptions.nThreads = '1'
86 
87 # some helper routines
def dumpPython(process,name):
    """Return a python snippet ("process.<name> = ...") that re-creates the
    named attribute of *process* in a dumped configuration."""
    theObject = getattr(process,name)
    if isinstance(theObject,(cms.Path,cms.EndPath,cms.Sequence)):
        # sequences dump themselves relative to the owning process
        return "process."+name+" = " + theObject.dumpPython("process")
    # modules, ESProducers and everything else dump without context.
    # The original code had two byte-identical branches here (one for
    # _Module/ESProducer, one catch-all); collapsed into a single return.
    return "process."+name+" = " + theObject.dumpPython()+"\n"
def filesFromList(fileName,s=None):
    """Read a text file listing .root files and return (primary, secondary).

    Each line holds either one file name, or two whitespace-separated names
    (primary + its parent/secondary file).  If *s* is given (a cms source),
    its fileNames / secondaryFileNames parameters are filled in as well.
    """
    import os
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    # NOTE(review): file handle is never closed explicitly; relies on GC.
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            # de-duplicate while preserving first-seen order
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        # create or extend the source's file-name parameters
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(prim)==0:
        raise Exception("There are not files in input from the file list")
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
129 
def filesFromDASQuery(query,option="",s=None):
    """Run a DAS query via dasgoclient and return (primary, secondary) files.

    Retries the client up to 3 times (sleeping 100s between attempts).  If
    *s* is given (a cms source), its fileNames / secondaryFileNames are
    filled in as well, mirroring filesFromList above.
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print "the query is",query
    eC=5                # exit code; any non-zero value triggers a (re)try
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print 'Sleeping, then retrying DAS'
            time.sleep(100)
        # NOTE(review): shell=True with an interpolated query string; query
        # comes from the cmsDriver command line (trusted operator input).
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    # parse the client output: one file (or primary+parent pair) per line
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
177 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first of *listOfKeys* found in *dict*.

    The matched key is removed from *dict* so callers can detect leftover,
    unused keys afterwards.  If none of the keys is present, *opt* is
    returned when given, otherwise an Exception is raised.

    The second parameter deliberately keeps its historical name ``dict``
    (shadowing the builtin) for call compatibility.
    """
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    # fallback only after ALL keys have been tried;
    # idiom fix: compare to None by identity ('is not'), not '!='
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
188 
190  """The main building routines """
191 
192  def __init__(self, options, process = None, with_output = False, with_input = False ):
193  """options taken from old cmsDriver and optparse """
194 
195  options.outfile_name = options.dirout+options.fileout
196 
197  self._options = options
198 
199  if self._options.isData and options.isMC:
200  raise Exception("ERROR: You may specify only --data or --mc, not both")
201  #if not self._options.conditions:
202  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
203 
204  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
205  if 'ENDJOB' in self._options.step:
206  if (hasattr(self._options,"outputDefinition") and \
207  self._options.outputDefinition != '' and \
208  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
209  (hasattr(self._options,"datatier") and \
210  self._options.datatier and \
211  'DQMIO' in self._options.datatier):
212  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
213  self._options.step=self._options.step.replace(',ENDJOB','')
214 
215 
216 
217  # what steps are provided by this class?
218  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
219  self.stepMap={}
220  self.stepKeys=[]
221  for step in self._options.step.split(","):
222  if step=='': continue
223  stepParts = step.split(":")
224  stepName = stepParts[0]
225  if stepName not in stepList and not stepName.startswith('re'):
226  raise ValueError("Step "+stepName+" unknown")
227  if len(stepParts)==1:
228  self.stepMap[stepName]=""
229  elif len(stepParts)==2:
230  self.stepMap[stepName]=stepParts[1].split('+')
231  elif len(stepParts)==3:
232  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
233  else:
234  raise ValueError("Step definition "+step+" invalid")
235  self.stepKeys.append(stepName)
236 
237  #print "map of steps is:",self.stepMap
238 
239  self.with_output = with_output
240  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
241  self.with_output = False
242  self.with_input = with_input
243  if process == None:
244  self.process = cms.Process(self._options.name)
245  else:
246  self.process = process
247  self.imports = []
248  self.define_Configs()
249  self.schedule = list()
250 
251  # we are doing three things here:
252  # creating a process to catch errors
253  # building the code to re-create the process
254 
255  self.additionalCommands = []
256  # TODO: maybe a list of to be dumped objects would help as well
257  self.blacklist_paths = []
258  self.addedObjects = []
259  self.additionalOutputs = {}
260 
261  self.productionFilterSequence = None
262  self.labelsToAssociate=[]
263  self.nextScheduleIsConditional=False
264  self.conditionalPaths=[]
265  self.excludedPaths=[]
266 
267  def profileOptions(self):
268  """
269  addIgProfService
270  Function to add the igprof profile service so that you can dump in the middle
271  of the run.
272  """
273  profileOpts = self._options.profile.split(':')
274  profilerStart = 1
275  profilerInterval = 100
276  profilerFormat = None
277  profilerJobFormat = None
278 
279  if len(profileOpts):
280  #type, given as first argument is unused here
281  profileOpts.pop(0)
282  if len(profileOpts):
283  startEvent = profileOpts.pop(0)
284  if not startEvent.isdigit():
285  raise Exception("%s is not a number" % startEvent)
286  profilerStart = int(startEvent)
287  if len(profileOpts):
288  eventInterval = profileOpts.pop(0)
289  if not eventInterval.isdigit():
290  raise Exception("%s is not a number" % eventInterval)
291  profilerInterval = int(eventInterval)
292  if len(profileOpts):
293  profilerFormat = profileOpts.pop(0)
294 
295 
296  if not profilerFormat:
297  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
298  self._options.step,
299  self._options.pileup,
300  self._options.conditions,
301  self._options.datatier,
302  self._options.profileTypeLabel)
303  if not profilerJobFormat and profilerFormat.endswith(".gz"):
304  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
305  elif not profilerJobFormat:
306  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
307 
308  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
309 
310  def load(self,includeFile):
311  includeFile = includeFile.replace('/','.')
312  self.process.load(includeFile)
313  return sys.modules[includeFile]
314 
315  def loadAndRemember(self, includeFile):
316  """helper routine to load am memorize imports"""
317  # we could make the imports a on-the-fly data method of the process instance itself
318  # not sure if the latter is a good idea
319  includeFile = includeFile.replace('/','.')
320  self.imports.append(includeFile)
321  self.process.load(includeFile)
322  return sys.modules[includeFile]
323 
    def executeAndRemember(self, command):
        """helper routine to remember replace statements

        Records *command* (a "process.x = ..." snippet) for replay in the
        dumped configuration, and executes it here against self.process.
        """
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
            import re
            # NOTE(review): exec of a caller-built string; commands originate
            # from internal option handling, not external input.
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
            #exec(command.replace("process.","self.process."))
332 
    def addCommon(self):
        """Set process-level options; optionally add the lazy-download
        adaptor and the IgProf profiling service."""
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            # harvesting jobs rethrow on missing products and merge output files
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered")
                                                     )
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
        #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))

        if self._options.profile:
            # dump points come from the parsed --profile option
            (start, interval, eventFormat, jobFormat)=self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
361 
    def addMaxEvents(self):
        """Here we decide how many evts will be processed"""
        # -1 (the default) means "all events"
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            # optional cap on the number of OUTPUT events
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
368 
    def addSource(self):
        """Here the source is built. Priority: file, generator"""
        self.addedObjects.append(("Input source","source"))

        # local helper: fill self.process.source from --filein/--secondfilein,
        # each entry being a plain file, a "filelist:" file, or a das/dbs query
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print "entry",entry
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print "entry",entry
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            # choose the source C++ type from the declared input file type
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print 'LHE input from article ',article
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    #check first if list of LHE files is loaded (not empty)
                    # NOTE(review): 'line' here is the LAST line of the listing
                    if len(line)<2:
                        print 'Issue to load LHE files, please check and try again.'
                        sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print 'Issue with empty filename, but can pass line check'
                        sys.exit(-1)
                    if len(args)>2:
                        # optional third field: number of events to skip
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            # a full --dasquery overrides/creates the PoolSource
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        ##drop LHEXMLStringProduct on input to save memory if appropriate
        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            # generator-only jobs run from an EmptySource
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                # NOTE(review): eval of an operator-supplied option string
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    # scenario maps to a module name holding the distribution
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
            # also record the same manipulation for the dumped configuration
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        return
496 
    def addOutput(self):
        """ Add output module to the process

        Two modes: the new --output list-of-dicts definition (first branch,
        returns early), or the legacy paired --eventcontent/--datatier lists.
        Returns the python snippet re-creating the output modules.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print "--datatier & --eventcontent options ignored"

            #new output convention with a list of dict
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
                ## event content
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role
                if not theModuleLabel:
                    # try progressively more specific labels until a free one is found
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # anyOf pops consumed keys, so anything left over is a typo
                if len(outDefDict.keys()):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                             dataTier = cms.untracked.string(theTier),
                                             filterName = cms.untracked.string(theFilterName))
                                          )
                # default SelectEvents from generation/filtering steps unless
                # an explicit selection was requested
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if addAlCaSelects:
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    # patch dumpPython so the dumped config references the
                    # event-content object instead of inlining all commands
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
                        return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()

            ##ends the --output options model
            return result

        # ---- legacy mode: paired --eventcontent / --datatier lists ----
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                # secondary outputs get the stream type appended to the name
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                                                   )
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                # NOTE(review): the imported customizer is never applied here;
                # looks like a dropped "miniAOD_customizeOutput(output)" line -
                # confirm against the upstream repository.

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                # same dumpPython patch as in the --output branch above
                def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
668 
670  """
671  Add selected standard sequences to the process
672  """
673  # load the pile up file
674  if self._options.pileup:
675  pileupSpec=self._options.pileup.split(',')[0]
676 
677  # Does the requested pile-up scenario exist?
678  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
679  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
680  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
681  raise Exception(message)
682 
683  # Put mixing parameters in a dictionary
684  if '.' in pileupSpec:
685  mixingDict={'file':pileupSpec}
686  elif pileupSpec.startswith('file:'):
687  mixingDict={'file':pileupSpec[5:]}
688  else:
689  import copy
690  mixingDict=copy.copy(Mixing[pileupSpec])
691  if len(self._options.pileup.split(','))>1:
692  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
693 
694  # Load the pu cfg file corresponding to the requested pu scenario
695  if 'file:' in pileupSpec:
696  #the file is local
697  self.process.load(mixingDict['file'])
698  print "inlining mixing module configuration"
699  self._options.inlineObjets+=',mix'
700  else:
701  self.loadAndRemember(mixingDict['file'])
702 
703  mixingDict.pop('file')
704  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
705  if self._options.pileup_input:
706  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
707  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
708  elif self._options.pileup_input.startswith("filelist:"):
709  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
710  else:
711  mixingDict['F']=self._options.pileup_input.split(',')
712  specialization=defineMixing(mixingDict)
713  for command in specialization:
714  self.executeAndRemember(command)
715  if len(mixingDict)!=0:
716  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
717 
718 
719  # load the geometry file
720  try:
721  if len(self.stepMap):
722  self.loadAndRemember(self.GeometryCFF)
723  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
724  self.loadAndRemember(self.SimGeometryCFF)
725  if self.geometryDBLabel:
726  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
727  except ImportError:
728  print "Geometry option",self._options.geometry,"unknown."
729  raise
730 
731  if len(self.stepMap):
732  self.loadAndRemember(self.magFieldCFF)
733 
734  for stepName in self.stepKeys:
735  stepSpec = self.stepMap[stepName]
736  print "Step:", stepName,"Spec:",stepSpec
737  if stepName.startswith('re'):
738  ##add the corresponding input content
739  if stepName[2:] not in self._options.donotDropOnInput:
740  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
741  stepName=stepName[2:]
742  if stepSpec=="":
743  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
744  elif type(stepSpec)==list:
745  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
746  elif type(stepSpec)==tuple:
747  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
748  else:
749  raise ValueError("Invalid step definition")
750 
751  if self._options.restoreRNDSeeds!=False:
752  #it is either True, or a process name
753  if self._options.restoreRNDSeeds==True:
754  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
755  else:
756  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
757  if self._options.inputEventContent or self._options.inputCommands:
758  if self._options.inputCommands:
759  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
760  else:
761  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
762 
763 
765  if self._options.inputEventContent:
766  import copy
767  def dropSecondDropStar(iec):
768  #drop occurence of 'drop *' in the list
769  count=0
770  for item in iec:
771  if item=='drop *':
772  if count!=0:
773  iec.remove(item)
774  count+=1
775 
776 
777  ## allow comma separated input eventcontent
778  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
779  for evct in self._options.inputEventContent.split(','):
780  if evct=='': continue
781  theEventContent = getattr(self.process, evct+"EventContent")
782  if hasattr(theEventContent,'outputCommands'):
783  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
784  if hasattr(theEventContent,'inputCommands'):
785  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
786 
787  dropSecondDropStar(self.process.source.inputCommands)
788 
789  if not self._options.dropDescendant:
790  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
791 
792 
793  return
794 
795  def addConditions(self):
796  """Add conditions to the process"""
797  if not self._options.conditions: return
798 
799  if 'FrontierConditions_GlobalTag' in self._options.conditions:
800  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
801  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
802 
803  self.loadAndRemember(self.ConditionsDefaultCFF)
804  from Configuration.AlCa.GlobalTag import GlobalTag
805  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
806  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
807  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
808 
809 
810  def addCustomise(self,unsch=0):
811  """Include the customise code """
812 
813  custOpt=[]
814  if unsch==0:
815  for c in self._options.customisation_file:
816  custOpt.extend(c.split(","))
817  else:
818  for c in self._options.customisation_file_unsch:
819  custOpt.extend(c.split(","))
820 
821  custMap=DictTypes.SortedKeysDict()
822  for opt in custOpt:
823  if opt=='': continue
824  if opt.count('.')>1:
825  raise Exception("more than . in the specification:"+opt)
826  fileName=opt.split('.')[0]
827  if opt.count('.')==0: rest='customise'
828  else:
829  rest=opt.split('.')[1]
830  if rest=='py': rest='customise' #catch the case of --customise file.py
831 
832  if fileName in custMap:
833  custMap[fileName].extend(rest.split('+'))
834  else:
835  custMap[fileName]=rest.split('+')
836 
837  if len(custMap)==0:
838  final_snippet='\n'
839  else:
840  final_snippet='\n# customisation of the process.\n'
841 
842  allFcn=[]
843  for opt in custMap:
844  allFcn.extend(custMap[opt])
845  for fcn in allFcn:
846  if allFcn.count(fcn)!=1:
847  raise Exception("cannot specify twice "+fcn+" as a customisation method")
848 
849  for f in custMap:
850  # let python search for that package and do syntax checking at the same time
851  packageName = f.replace(".py","").replace("/",".")
852  __import__(packageName)
853  package = sys.modules[packageName]
854 
855  # now ask the package for its definition and pick .py instead of .pyc
856  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
857 
858  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
859  if self._options.inline_custom:
860  for line in file(customiseFile,'r'):
861  if "import FWCore.ParameterSet.Config" in line:
862  continue
863  final_snippet += line
864  else:
865  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
866  for fcn in custMap[f]:
867  print "customising the process with",fcn,"from",f
868  if not hasattr(package,fcn):
869  #bound to fail at run time
870  raise Exception("config "+f+" has no function "+fcn)
871  #execute the command
872  self.process=getattr(package,fcn)(self.process)
873  #and print it in the configuration
874  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
875  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
876 
877  if len(custMap)!=0:
878  final_snippet += '\n# End of customisation functions\n'
879 
880  ### now for a useful command
881  return final_snippet
882 
884  final_snippet='\n# Customisation from command line\n'
885  if self._options.customise_commands:
886  import string
887  for com in self._options.customise_commands.split('\\n'):
888  com=string.lstrip(com)
889  self.executeAndRemember(com)
890  final_snippet +='\n'+com
891 
892  return final_snippet
893 
894  #----------------------------------------------------------------------------
895  # here the methods to define the python includes for each step or
896  # conditions
897  #----------------------------------------------------------------------------
    def define_Configs(self):
        """Define the default cff file and default sequence name for every
        workflow step, then override them according to the command-line
        options: data vs MC, scenario (pp/cosmics/HeavyIons/nocoll),
        fastsim, premixing/datamixing, geometry and magnetic field.
        Only sets attributes on self / self._options; loads the common
        services and particle-table configs as a side effect."""
        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

        # baseline defaults (data-taking flavour); overridden further below
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
        self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
        self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
        self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
        self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
        self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
        self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        # data mixing: pick the DM flavours of digi / digi2raw / L1 emulation
        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
            else:
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
            self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        # premixing stage 1: dedicated digi / digi2raw / L1 emulation cffs
        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
            self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawPreMixing_cff"
            self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"

        # default sequence name for each step (None: no default sequence)
        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        # full 'reconstruction' when fastsim or when both RAW2DIGI and RECO
        # run in this job; otherwise start from an existing RECO file
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'

        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'
        self.PATGENDefaultSeq='miniGEN'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        # VtxSmearedDefaultKey is defined elsewhere in this file's imports
        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # if its MC then change the raw2digi
        if self._options.isMC==True:
            self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            # no beamspot smearing on real data
            self._options.beamspot = None

        #patch for gen, due to backward incompatibility
        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        # cosmics scenario: dedicated digi/reco/skim/content/validation/DQM
        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        # heavy-ion scenario: HI flavours of HLT menu, reco, alca, DQM, skims
        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        # the magnetic field
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
                self._options.magField='AutoFromDBCurrent'
        # e.g. '3.8T' -> 'MagneticField_38T_cff'; collapse double underscores
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None
        simGeometry=''
        if self._options.fast:
            # fastsim has its own geometry bundles, chosen by the conditions string
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            # resolve a geometry key through GeometryConf, else use it verbatim
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                #may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            # first entry is the sim geometry: from the DB, an explicit cff, or a named key
            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
                print "with DB:"
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # synchronize the geometry configuration and the FullSimulation sequence to be used
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cff files and sequences
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        # Mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey


        #not driven by a default cff anymore
        if self._options.isData:
            self._options.pileup=None


        self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1117 
1118  # for alca, skims, etc
1119  def addExtraStream(self,name,stream,workflow='full'):
1120  # define output module and go from there
1121  output = cms.OutputModule("PoolOutputModule")
1122  if stream.selectEvents.parameters_().__len__()!=0:
1123  output.SelectEvents = stream.selectEvents
1124  else:
1125  output.SelectEvents = cms.untracked.PSet()
1126  output.SelectEvents.SelectEvents=cms.vstring()
1127  if isinstance(stream.paths,tuple):
1128  for path in stream.paths:
1129  output.SelectEvents.SelectEvents.append(path.label())
1130  else:
1131  output.SelectEvents.SelectEvents.append(stream.paths.label())
1132 
1133 
1134 
1135  if isinstance(stream.content,str):
1136  evtPset=getattr(self.process,stream.content)
1137  for p in evtPset.parameters_():
1138  setattr(output,p,getattr(evtPset,p))
1139  if not self._options.inlineEventContent:
1140  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1141  return label
1142  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1143  else:
1144  output.outputCommands = stream.content
1145 
1146 
1147  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1148 
1149  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1150  filterName = cms.untracked.string(stream.name))
1151 
1152  if self._options.filtername:
1153  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1154 
1155  #add an automatic flushing to limit memory consumption
1156  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1157 
1158  if workflow in ("producers,full"):
1159  if isinstance(stream.paths,tuple):
1160  for path in stream.paths:
1161  self.schedule.append(path)
1162  else:
1163  self.schedule.append(stream.paths)
1164 
1165 
1166  # in case of relvals we don't want to have additional outputs
1167  if (not self._options.relval) and workflow in ("full","output"):
1168  self.additionalOutputs[name] = output
1169  setattr(self.process,name,output)
1170 
1171  if workflow == 'output':
1172  # adjust the select events to the proper trigger results from previous process
1173  filterList = output.SelectEvents.SelectEvents
1174  for i, filter in enumerate(filterList):
1175  filterList[i] = filter+":"+self._options.triggerResultsProcess
1176 
1177  return output
1178 
1179  #----------------------------------------------------------------------------
1180  # here the methods to create the steps. Of course we are doing magic here ;)
1181  # prepare_STEPNAME modifies self.process and what else's needed.
1182  #----------------------------------------------------------------------------
1183 
1184  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1185  if ( len(sequence.split('.'))==1 ):
1186  l=self.loadAndRemember(defaultCFF)
1187  elif ( len(sequence.split('.'))==2 ):
1188  l=self.loadAndRemember(sequence.split('.')[0])
1189  sequence=sequence.split('.')[1]
1190  else:
1191  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1192  print sequence,"not recognized"
1193  raise
1194  return l
1195 
1196  def scheduleSequence(self,seq,prefix,what='Path'):
1197  if '*' in seq:
1198  #create only one path with all sequences in it
1199  for i,s in enumerate(seq.split('*')):
1200  if i==0:
1201  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1202  else:
1203  p=getattr(self.process,prefix)
1204  p+=getattr(self.process, s)
1205  self.schedule.append(getattr(self.process,prefix))
1206  return
1207  else:
1208  #create as many path as many sequences
1209  if not '+' in seq:
1210  if self.nextScheduleIsConditional:
1211  self.conditionalPaths.append(prefix)
1212  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1213  self.schedule.append(getattr(self.process,prefix))
1214  else:
1215  for i,s in enumerate(seq.split('+')):
1216  sn=prefix+'%d'%(i)
1217  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1218  self.schedule.append(getattr(self.process,sn))
1219  return
1220 
1221  def scheduleSequenceAtEnd(self,seq,prefix):
1222  self.scheduleSequence(seq,prefix,what='EndPath')
1223  return
1224 
1225  def prepare_ALCAPRODUCER(self, sequence = None):
1226  self.prepare_ALCA(sequence, workflow = "producers")
1227 
1228  def prepare_ALCAOUTPUT(self, sequence = None):
1229  self.prepare_ALCA(sequence, workflow = "output")
1230 
    def prepare_ALCA(self, sequence = None, workflow = 'full'):
        """ Enrich the process with alca streams

        sequence -- '+'-separated list of ALCA stream names (supports @X
                    keys expanded through autoAlca)
        workflow -- passed to addExtraStream: 'full', 'producers' or 'output'
        Raises Exception when a requested stream does not exist in the
        ALCA configuration.
        """
        alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        maxLevel=0
        from Configuration.AlCa.autoAlca import autoAlca
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        # scan every object of the alca config for requested FilteredStreams
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    # also keep the DQM products in this stream's output
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # drop every occurrence of this stream from the pending list
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)

        # anything left in alcaList could not be matched: report and fail
        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print "The following alcas could not be found "+str(alcaList)
            print "available ",available
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1288 
    def prepare_LHE(self, sequence = None):
        """Load the external LHE producer fragment, inline its modules,
        set the number of events and schedule it in its own (excluded) path."""
        #load the fragment
        ##make it loadable
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)
        ##inline the modules
        self._options.inlineObjets+=','+sequence

        # the LHE producer module carries the requested event count
        getattr(self.process,sequence).nEvents = int(self._options.number)

        #schedule it
        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1305 
    def prepare_GEN(self, sequence = None):
        """ load the fragment of generator configuration

        Imports the generator fragment named by --evt_type (if any),
        exposes its modules/sequences for inlining, remembers the
        production filter sequence, loads the vertex smearing and
        (for HeavyIons) the generator mixing, then schedules the
        generation step and -- unless this is a reGEN job -- the
        gen-filter summary end path.
        """
        loadFailure=False
        #remove trailing .py
        #support old style .cfi by changing into something.cfi into something_cfi
        #remove python/ from the name
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
        #standard location of fragments
        if not '/' in loadFragment:
            loadFragment='Configuration.Generator.'+loadFragment
        else:
            loadFragment=loadFragment.replace('/','.')
        try:
            print "Loading generator fragment from",loadFragment
            __import__(loadFragment)
        except:
            # NOTE: bare except kept -- a missing fragment is acceptable
            # when input files are provided instead
            loadFailure=True
            #if self.process.source and self.process.source.type_()=='EmptySource':
            if not (self._options.filein or self._options.dasquery):
                raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

        if not loadFailure:
            generatorModule=sys.modules[loadFragment]
            genModules=generatorModule.__dict__
            #remove lhe producer module since this should have been
            #imported instead in the LHE step
            if self.LHEDefaultSeq in genModules:
                del genModules[self.LHEDefaultSeq]

            if self._options.hideGen:
                self.loadAndRemember(loadFragment)
            else:
                self.process.load(loadFragment)
                # expose the objects from that fragment to the configuration
                import FWCore.ParameterSet.Modules as cmstypes
                for name in genModules:
                    theObject = getattr(generatorModule,name)
                    if isinstance(theObject, cmstypes._Module):
                        self._options.inlineObjets=name+','+self._options.inlineObjets
                    elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                        self._options.inlineObjets+=','+name

            # remember which sequence acts as the production filter
            if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
                if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                    self.productionFilterSequence = 'ProductionFilterSequence'
                elif 'generator' in genModules:
                    self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
        genSeqName=sequence.split('.')[-1]

        if True:
            try:
                from Configuration.StandardSequences.VtxSmeared import VtxSmeared
                cffToBeLoaded=VtxSmeared[self._options.beamspot]
                self.loadAndRemember(cffToBeLoaded)
            except ImportError:
                raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

            if self._options.scenario == 'HeavyIons':
                if self._options.pileup=='HiMixGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
                else:
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
        self.schedule.append(self.process.generation_step)

        #register to the genstepfilter the name of the path (static right now, but might evolve)
        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:
            #stop here
            return

        """ Enrich the schedule with the summary of the filter step """
        #the gen filter in the endpath
        self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
        self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
        return
1387 
1388  def prepare_SIM(self, sequence = None):
1389  """ Enrich the schedule with the simulation step"""
1390  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1391  if not self._options.fast:
1392  if self._options.gflash==True:
1393  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1394 
1395  if self._options.magField=='0T':
1396  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1397  else:
1398  if self._options.magField=='0T':
1399  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1400 
1401  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1402  return
1403 
1404  def prepare_DIGI(self, sequence = None):
1405  """ Enrich the schedule with the digitisation step"""
1406  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1407 
1408  if self._options.gflash==True:
1409  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1410 
1411  if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1412  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1413 
1414  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1415  if self._options.inputEventContent=='':
1416  self._options.inputEventContent='REGEN'
1417  else:
1418  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1419 
1420 
1421  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1422  return
1423 
1424  def prepare_DIGIPREMIX(self, sequence = None):
1425  """ Enrich the schedule with the digitisation step"""
1426  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1427 
1428  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1429 
1430  if sequence == 'pdigi_valid':
1431  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1432  else:
1433  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1434 
1435  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1436  return
1437 
1438  def prepare_DIGIPREMIX_S2(self, sequence = None):
1439  """ Enrich the schedule with the digitisation step"""
1440  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1441 
1442  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1443 
1444 
1445  if sequence == 'pdigi_valid':
1446  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1447  else:
1448  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1449 
1450  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1451  return
1452 
1453  def prepare_CFWRITER(self, sequence = None):
1454  """ Enrich the schedule with the crossing frame writer step"""
1455  self.loadAndRemember(self.CFWRITERDefaultCFF)
1456  self.scheduleSequence('pcfw','cfwriter_step')
1457  return
1458 
1459  def prepare_DATAMIX(self, sequence = None):
1460  """ Enrich the schedule with the digitisation step"""
1461  self.loadAndRemember(self.DATAMIXDefaultCFF)
1462  self.scheduleSequence('pdatamix','datamixing_step')
1463 
1464  if self._options.pileup_input:
1465  theFiles=''
1466  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1467  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1468  elif self._options.pileup_input.startswith("filelist:"):
1469  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1470  else:
1471  theFiles=self._options.pileup_input.split(',')
1472  #print theFiles
1473  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1474 
1475  return
1476 
1477  def prepare_DIGI2RAW(self, sequence = None):
1478  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1479  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1480  if "DIGIPREMIX" in self.stepMap.keys():
1481  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1482  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1483 
1484  return
1485 
1486  def prepare_REPACK(self, sequence = None):
1487  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1488  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1489  return
1490 
1491  def prepare_L1(self, sequence = None):
1492  """ Enrich the schedule with the L1 simulation step"""
1493  assert(sequence == None)
1494  self.loadAndRemember(self.L1EMDefaultCFF)
1495  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1496  return
1497 
    def prepare_L1REPACK(self, sequence = None):
        """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
        # Only a fixed set of repacking scenarios is available; each maps to
        # a dedicated SimL1EmulatorRepack_<name>_cff fragment.
        supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
        if sequence in supported:
            self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
            if self._options.scenario == 'HeavyIons':
                # heavy-ion data carries 'rawDataRepacker' instead of the
                # pp 'rawDataCollector' label: patch the emulator inputs
                self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
            self.scheduleSequence('SimL1Emulator','L1RePack_step')
        else:
            print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
            raise Exception('unsupported feature')
1509 
1510 
    def prepare_HLT(self, sequence = None):
        """ Enrich the schedule with the HLT simulation step"""
        # an explicit menu (or '@' alias) is mandatory
        if not sequence:
            print "no specification of the hlt menu has been given, should never happen"
            raise Exception('no HLT sequence provided')

        if '@' in sequence:
            # case where HLT:@something was provided
            # resolve the alias through the autoHLT mapping
            from Configuration.HLT.autoHLT import autoHLT
            key = sequence[1:]
            if key in autoHLT:
                sequence = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in sequence:
            #case where HLT:something:something was provided
            # load the menu at runtime via process.loadHltConfiguration
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the keyword arguments passed to loadHltConfiguration
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
            if sequence == 'run,fromSource':
                # take the run number from the configured source
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception('Cannot replace menu to load %s'%(sequence))
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the frozen HLT_<menu>_cff fragment
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            # process is not called HLT: rename it so the menu still works
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        self.schedule.append(self.process.HLTSchedule)
        # HLT paths are dumped through HLTSchedule; keep them out of the
        # regular path dump
        [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]

        #this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1564 
1565 
1566  def prepare_RAW2RECO(self, sequence = None):
1567  if ','in sequence:
1568  seqReco=sequence.split(',')[1]
1569  seqDigi=sequence.split(',')[0]
1570  else:
1571  print "RAW2RECO requires two specifications",sequence,"insufficient"
1572 
1573  self.prepare_RAW2DIGI(seqDigi)
1574  self.prepare_RECO(seqReco)
1575  return
1576 
1577  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1578  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1579  self.scheduleSequence(sequence,'raw2digi_step')
1580  # if self._options.isRepacked:
1581  #self.renameInputTagsInSequence(sequence)
1582  return
1583 
1584  def prepare_PATFILTER(self, sequence=None):
1585  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1586  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1587  for filt in allMetFilterPaths:
1588  self.schedule.append(getattr(self.process,'Flag_'+filt))
1589 
    def prepare_L1HwVal(self, sequence = 'L1HwVal'):
        ''' Enrich the schedule with L1 HW validation '''
        # the fragment is still loaded for backward compatibility, but
        # nothing is scheduled any more (see the disabled line below)
        self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
        #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
        print '\n\n\n DEPRECATED this has no action \n\n\n'
        return
1596 
1597  def prepare_L1Reco(self, sequence = "L1Reco"):
1598  ''' Enrich the schedule with L1 reconstruction '''
1599  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1600  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1601  return
1602 
1603  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1604  ''' Enrich the schedule with L1 reconstruction '''
1606  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1607  return
1608 
1609  def prepare_FILTER(self, sequence = None):
1610  ''' Enrich the schedule with a user defined filter sequence '''
1611  ## load the relevant part
1612  filterConfig=self.load(sequence.split('.')[0])
1613  filterSeq=sequence.split('.')[-1]
1614  ## print it in the configuration
1616  def __init__(self):
1617  self.inliner=''
1618  pass
1619  def enter(self,visitee):
1620  try:
1621  label=visitee.label()
1622  ##needs to be in reverse order
1623  self.inliner=label+','+self.inliner
1624  except:
1625  pass
1626  def leave(self,v): pass
1627 
1628  expander=PrintAllModules()
1629  getattr(self.process,filterSeq).visit( expander )
1630  self._options.inlineObjets+=','+expander.inliner
1631  self._options.inlineObjets+=','+filterSeq
1632 
1633  ## put the filtering path in the schedule
1634  self.scheduleSequence(filterSeq,'filtering_step')
1635  self.nextScheduleIsConditional=True
1636  ## put it before all the other paths
1637  self.productionFilterSequence = filterSeq
1638 
1639  return
1640 
1641  def prepare_RECO(self, sequence = "reconstruction"):
1642  ''' Enrich the schedule with reconstruction '''
1643  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1644  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1645  return
1646 
    def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
            sys.exit()
        # note: the fragment is loaded from the *default* sequence name;
        # only the scheduling below honours the sequence argument
        self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
        return
1655 
1656  def prepare_PAT(self, sequence = "miniAOD"):
1657  ''' Enrich the schedule with PAT '''
1658  self.prepare_PATFILTER(self)
1659  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1660  self.labelsToAssociate.append('patTask')
1661  if not self._options.runUnscheduled:
1662  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1663  if self._options.isData:
1664  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1665  else:
1666  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1667  if self._options.fast:
1668  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1669 
1670  if self._options.hltProcess:
1671  if len(self._options.customise_commands) > 1:
1672  self._options.customise_commands = self._options.customise_commands + " \n"
1673  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1674  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1675 
1676 # self.renameHLTprocessInSequence(sequence)
1677 
1678  return
1679 
1680  def prepare_PATGEN(self, sequence = "miniGEN"):
1681  ''' Enrich the schedule with PATGEN '''
1682  self.loadDefaultOrSpecifiedCFF(sequence,self.PATGENDefaultCFF) #this is unscheduled
1683  self.labelsToAssociate.append('patGENTask')
1684  if not self._options.runUnscheduled:
1685  raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1686  if self._options.isData:
1687  raise Exception("PATGEN step can only run on MC")
1688  return
1689 
1690  def prepare_EI(self, sequence = None):
1691  ''' Enrich the schedule with event interpretation '''
1692  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1693  if sequence in EventInterpretation:
1694  self.EIDefaultCFF = EventInterpretation[sequence]
1695  sequence = 'EIsequence'
1696  else:
1697  raise Exception('Cannot set %s event interpretation'%( sequence) )
1698  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1699  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1700  return
1701 
1702  def prepare_SKIM(self, sequence = "all"):
1703  ''' Enrich the schedule with skimming fragments'''
1704  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1705  sequence = sequence.split('.')[-1]
1706 
1707  skimlist=sequence.split('+')
1708  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1709  from Configuration.Skimming.autoSkim import autoSkim
1710  self.expandMapping(skimlist,autoSkim)
1711 
1712  #print "dictionnary for skims:",skimConfig.__dict__
1713  for skim in skimConfig.__dict__:
1714  skimstream = getattr(skimConfig,skim)
1715  if isinstance(skimstream,cms.Path):
1716  #black list the alca path so that they do not appear in the cfg
1717  self.blacklist_paths.append(skimstream)
1718  if (not isinstance(skimstream,cms.FilteredStream)):
1719  continue
1720  shortname = skim.replace('SKIMStream','')
1721  if (sequence=="all"):
1722  self.addExtraStream(skim,skimstream)
1723  elif (shortname in skimlist):
1724  self.addExtraStream(skim,skimstream)
1725  #add a DQM eventcontent for this guy
1726  if self._options.datatier=='DQM':
1727  self.process.load(self.EVTCONTDefaultCFF)
1728  skimstreamDQM = cms.FilteredStream(
1729  responsible = skimstream.responsible,
1730  name = skimstream.name+'DQM',
1731  paths = skimstream.paths,
1732  selectEvents = skimstream.selectEvents,
1733  content = self._options.datatier+'EventContent',
1734  dataTier = cms.untracked.string(self._options.datatier)
1735  )
1736  self.addExtraStream(skim+'DQM',skimstreamDQM)
1737  for i in range(skimlist.count(shortname)):
1738  skimlist.remove(shortname)
1739 
1740 
1741 
1742  if (skimlist.__len__()!=0 and sequence!="all"):
1743  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1744  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1745 
1746  def prepare_USER(self, sequence = None):
1747  ''' Enrich the schedule with a user defined sequence '''
1748  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1749  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1750  return
1751 
1752  def prepare_POSTRECO(self, sequence = None):
1753  """ Enrich the schedule with the postreco step """
1754  self.loadAndRemember(self.POSTRECODefaultCFF)
1755  self.scheduleSequence('postreco_generator','postreco_step')
1756  return
1757 
1758 
1759  def prepare_VALIDATION(self, sequence = 'validation'):
1760  print sequence,"in preparing validation"
1761  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1762  from Validation.Configuration.autoValidation import autoValidation
1763  #in case VALIDATION:something:somethingelse -> something,somethingelse
1764  sequence=sequence.split('.')[-1]
1765  if sequence.find(',')!=-1:
1766  prevalSeqName=sequence.split(',')[0].split('+')
1767  valSeqName=sequence.split(',')[1].split('+')
1768  self.expandMapping(prevalSeqName,autoValidation,index=0)
1769  self.expandMapping(valSeqName,autoValidation,index=1)
1770  else:
1771  if '@' in sequence:
1772  prevalSeqName=sequence.split('+')
1773  valSeqName=sequence.split('+')
1774  self.expandMapping(prevalSeqName,autoValidation,index=0)
1775  self.expandMapping(valSeqName,autoValidation,index=1)
1776  else:
1777  postfix=''
1778  if sequence:
1779  postfix='_'+sequence
1780  prevalSeqName=['prevalidation'+postfix]
1781  valSeqName=['validation'+postfix]
1782  if not hasattr(self.process,valSeqName[0]):
1783  prevalSeqName=['']
1784  valSeqName=[sequence]
1785 
1786  def NFI(index):
1787  ##name from index, required to keep backward compatibility
1788  if index==0:
1789  return ''
1790  else:
1791  return '%s'%index
1792 
1793 
1794  #rename the HLT process in validation steps
1795  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1796  for s in valSeqName+prevalSeqName:
1797  if s:
1799  for (i,s) in enumerate(prevalSeqName):
1800  if s:
1801  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1802  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1803 
1804  for (i,s) in enumerate(valSeqName):
1805  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1806  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1807 
1808  #needed in case the miniAODValidation sequence is run starting from AODSIM
1809  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1810  return
1811 
1812  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1813  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1814  self._options.restoreRNDSeeds=True
1815 
1816  if not 'DIGI' in self.stepMap and not self._options.fast:
1817  self.executeAndRemember("process.mix.playback = True")
1818  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1819  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1820  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1821 
1822  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1823  #will get in the schedule, smoothly
1824  for (i,s) in enumerate(valSeqName):
1825  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1826 
1827  return
1828 
1829 
1831  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1832  It will climb down within PSets, VPSets and VInputTags to find its target"""
1833  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1834  self._paramReplace = paramReplace
1835  self._paramSearch = paramSearch
1836  self._verbose = verbose
1837  self._whitelist = whitelist
1838 
    def doIt(self,pset,base):
        # Recursively walk a parameter set, replacing every occurrence of
        # the searched value in strings, vstrings, InputTag process names
        # and VInputTags; 'base' is only used for the verbose printout path.
        if isinstance(pset, cms._Parameterizable):
            for name in pset.parameters_().keys():
                # skip whitelisted parameters
                if name in self._whitelist:
                    continue
                # if I use pset.parameters_().items() I get copies of the parameter values
                # so I can't modify the nested pset
                value = getattr(pset,name)
                type = value.pythonTypeName()
                if type in ('cms.PSet', 'cms.untracked.PSet'):
                    self.doIt(value,base+"."+name)
                elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
                    for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                elif type in ('cms.string', 'cms.untracked.string'):
                    if value.value() == self._paramSearch:
                        if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
                        setattr(pset, name,self._paramReplace)
                elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
                    for (i,n) in enumerate(value):
                        if not isinstance(n, cms.InputTag):
                            n=cms.InputTag(n)
                        if n.processName == self._paramSearch:
                            # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                            if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
                            setattr(n,"processName",self._paramReplace)
                            value[i]=n
                elif type in ('cms.vstring', 'cms.untracked.vstring'):
                    for (i,n) in enumerate(value):
                        if n==self._paramSearch:
                            getattr(pset,name)[i]=self._paramReplace
                elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
                    if value.processName == self._paramSearch:
                        if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
                        setattr(getattr(pset, name),"processName",self._paramReplace)
1874 
1875  def enter(self,visitee):
1876  label = ''
1877  try:
1878  label = visitee.label()
1879  except AttributeError:
1880  label = '<Module not in a Process>'
1881  except:
1882  label = 'other execption'
1883  self.doIt(visitee, label)
1884 
1885  def leave(self,visitee):
1886  pass
1887 
    # visit a sequence to replace all input tags, both in the live process
    # and in the dumped configuration (via additionalCommands)
    def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
        print "Replacing all InputTag %s => %s"%(oldT,newT)
        from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
        massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
        # make sure the dumped configuration performs the same replacement
        loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
        if not loadMe in self.additionalCommands:
            self.additionalCommands.append(loadMe)
        self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1897 
    #change the process name used to address HLT results in any sequence
    def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
        # NOTE(review): the 'proc' argument is always overwritten below
        if self._options.hltProcess:
            proc=self._options.hltProcess
        else:
            proc=self.process.name_()
        # nothing to do when the target name is already the HLT default
        if proc==HLTprocess: return
        # look up all module in dqm sequence
        print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
        getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
        # replay the same replacement in the dumped configuration
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1911 
1912 
1913  def expandMapping(self,seqList,mapping,index=None):
1914  maxLevel=20
1915  level=0
1916  while '@' in repr(seqList) and level<maxLevel:
1917  level+=1
1918  for specifiedCommand in seqList:
1919  if specifiedCommand.startswith('@'):
1920  location=specifiedCommand[1:]
1921  if not location in mapping:
1922  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1923  mappedTo=mapping[location]
1924  if index!=None:
1925  mappedTo=mappedTo[index]
1926  seqList.remove(specifiedCommand)
1927  seqList.extend(mappedTo.split('+'))
1928  break;
1929  if level==maxLevel:
1930  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1931 
    def prepare_DQM(self, sequence = 'DQMOffline'):
        # this one needs replacement

        self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
        # first list: sequences for dqmoffline_step; second list: the
        # corresponding on-PAT sequences (autoDQM index 0 and 1)
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print "Duplicate entries for DQM:, using",sequenceList

        # one EndPath per sequence: dqmoffline_step, dqmoffline_1_step, ...
        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            # DQM modules read trigger results: fix the HLT process name
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        # same pattern for the on-PAT DQM sequences
        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
1968 
    def prepare_HARVESTING(self, sequence = None):
        """ Enrich the process with harvesting step """
        self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
        self.loadAndRemember(self.DQMSaverCFF)

        harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which HARVESTING paths to use
        harvestingList = sequence.split("+")
        from DQMOffline.Configuration.autoDQM import autoDQM
        from Validation.Configuration.autoValidation import autoValidation
        import copy
        # aliases may come from either mapping; merge them (deep copy keeps
        # the imported autoDQM dictionary untouched)
        combined_mapping = copy.deepcopy( autoDQM )
        combined_mapping.update( autoValidation )
        self.expandMapping(harvestingList,combined_mapping,index=-1)

        if len(set(harvestingList))!=len(harvestingList):
            harvestingList=list(set(harvestingList))
            print "Duplicate entries for HARVESTING, using",harvestingList

        for name in harvestingList:
            if not name in harvestingConfig.__dict__:
                print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
                continue
            harvestingstream = getattr(harvestingConfig,name)
            if isinstance(harvestingstream,cms.Path):
                # ready-made paths are scheduled as-is and kept out of the
                # regular path dump
                self.schedule.append(harvestingstream)
                self.blacklist_paths.append(harvestingstream)
            if isinstance(harvestingstream,cms.Sequence):
                # sequences get wrapped into a '<name>_step' path first
                setattr(self.process,name+"_step",cms.Path(harvestingstream))
                self.schedule.append(getattr(self.process,name+"_step"))

        self.scheduleSequence('DQMSaver','dqmsave_step')
        return
2004 
    def prepare_ALCAHARVEST(self, sequence = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        sequence=sequence.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # register the matching DB output and metadata records
                self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over in harvestingList was not a known path
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print "The following harvesting could not be found : ", harvestingList
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2032 
2033 
2034 
2035  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2036  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2037  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2038  return
2039 
2041  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2042  self.schedule.append(self.process.reconstruction)
2043 
2044 
2045  def build_production_info(self, evt_type, evtnumber):
2046  """ Add useful info for the production. """
2047  self.process.configurationMetadata=cms.untracked.PSet\
2048  (version=cms.untracked.string("$Revision: 1.19 $"),
2049  name=cms.untracked.string("Applications"),
2050  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2051  )
2052 
2053  self.addedObjects.append(("Production Info","configurationMetadata"))
2054 
2055 
2056  def prepare(self, doChecking = False):
2057  """ Prepare the configuration string and add missing pieces."""
2058 
2059  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2060  self.addMaxEvents()
2061  if self.with_input:
2062  self.addSource()
2063  self.addStandardSequences()
2064  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2065  self.completeInputCommand()
2066  self.addConditions()
2067 
2068 
2069  outputModuleCfgCode=""
2070  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2071  outputModuleCfgCode=self.addOutput()
2072 
2073  self.addCommon()
2074 
2075  self.pythonCfgCode = "# Auto generated configuration file\n"
2076  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2077  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2078  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2079  if hasattr(self._options,"era") and self._options.era :
2080  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2081  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2082  # Multiple eras can be specified in a comma seperated list
2083  for requestedEra in self._options.era.split(",") :
2084  self.pythonCfgCode += ",eras."+requestedEra
2085  self.pythonCfgCode += ")\n\n" # end of the line
2086  else :
2087  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2088 
2089  self.pythonCfgCode += "# import of standard configurations\n"
2090  for module in self.imports:
2091  self.pythonCfgCode += ("process.load('"+module+"')\n")
2092 
2093  # production info
2094  if not hasattr(self.process,"configurationMetadata"):
2095  self.build_production_info(self._options.evt_type, self._options.number)
2096  else:
2097  #the PSet was added via a load
2098  self.addedObjects.append(("Production Info","configurationMetadata"))
2099 
2100  self.pythonCfgCode +="\n"
2101  for comment,object in self.addedObjects:
2102  if comment!="":
2103  self.pythonCfgCode += "\n# "+comment+"\n"
2104  self.pythonCfgCode += dumpPython(self.process,object)
2105 
2106  # dump the output definition
2107  self.pythonCfgCode += "\n# Output definition\n"
2108  self.pythonCfgCode += outputModuleCfgCode
2109 
2110  # dump all additional outputs (e.g. alca or skim streams)
2111  self.pythonCfgCode += "\n# Additional output definition\n"
2112  #I do not understand why the keys are not normally ordered.
2113  nl=self.additionalOutputs.keys()
2114  nl.sort()
2115  for name in nl:
2116  output = self.additionalOutputs[name]
2117  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2118  tmpOut = cms.EndPath(output)
2119  setattr(self.process,name+'OutPath',tmpOut)
2120  self.schedule.append(tmpOut)
2121 
2122  # dump all additional commands
2123  self.pythonCfgCode += "\n# Other statements\n"
2124  for command in self.additionalCommands:
2125  self.pythonCfgCode += command + "\n"
2126 
2127  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2128  for object in self._options.inlineObjets.split(','):
2129  if not object:
2130  continue
2131  if not hasattr(self.process,object):
2132  print 'cannot inline -'+object+'- : not known'
2133  else:
2134  self.pythonCfgCode +='\n'
2135  self.pythonCfgCode +=dumpPython(self.process,object)
2136 
2137  # dump all paths
2138  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2139  for path in self.process.paths:
2140  if getattr(self.process,path) not in self.blacklist_paths:
2141  self.pythonCfgCode += dumpPython(self.process,path)
2142 
2143  for endpath in self.process.endpaths:
2144  if getattr(self.process,endpath) not in self.blacklist_paths:
2145  self.pythonCfgCode += dumpPython(self.process,endpath)
2146 
2147  # dump the schedule
2148  self.pythonCfgCode += "\n# Schedule definition\n"
2149  result = "process.schedule = cms.Schedule("
2150 
2151  # handling of the schedule
2152  self.process.schedule = cms.Schedule()
2153  for item in self.schedule:
2154  if not isinstance(item, cms.Schedule):
2155  self.process.schedule.append(item)
2156  else:
2157  self.process.schedule.extend(item)
2158 
2159  if hasattr(self.process,"HLTSchedule"):
2160  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2161  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2162  pathNames = ['process.'+p.label_() for p in beforeHLT]
2163  result += ','.join(pathNames)+')\n'
2164  result += 'process.schedule.extend(process.HLTSchedule)\n'
2165  pathNames = ['process.'+p.label_() for p in afterHLT]
2166  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2167  else:
2168  pathNames = ['process.'+p.label_() for p in self.schedule]
2169  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2170 
2171  self.pythonCfgCode += result
2172 
2173  for labelToAssociate in self.labelsToAssociate:
2174  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2175  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2176 
2177  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2178  associatePatAlgosToolsTask(self.process)
2179  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2180  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2181 
2182  if self._options.nThreads is not "1":
2183  self.pythonCfgCode +="\n"
2184  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2185  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2186  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2187  #repacked version
2188  if self._options.isRepacked:
2189  self.pythonCfgCode +="\n"
2190  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2191  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2192  MassReplaceInputTag(self.process)
2193 
2194  # special treatment in case of production filter sequence 2/2
2195  if self.productionFilterSequence:
2196  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2197  self.pythonCfgCode +='for path in process.paths:\n'
2198  if len(self.conditionalPaths):
2199  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2200  if len(self.excludedPaths):
2201  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2202  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2203  pfs = getattr(self.process,self.productionFilterSequence)
2204  for path in self.process.paths:
2205  if not path in self.conditionalPaths: continue
2206  if path in self.excludedPaths: continue
2207  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2208 
2209 
2210  # dump customise fragment
2211  self.pythonCfgCode += self.addCustomise()
2212 
2213  if self._options.runUnscheduled:
2214  # prune and delete paths
2215  #this is not supporting the blacklist at this point since I do not understand it
2216  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2217  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2218  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2219 
2220  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2221  self.process=convertToUnscheduled(self.process)
2222 
2223  self.pythonCfgCode += self.addCustomise(1)
2224 
2225  self.pythonCfgCode += self.addCustomiseCmdLine()
2226 
2227  # Temporary hack to put the early delete customization after
2228  # everything else
2229  #
2230  # FIXME: remove when no longer needed
2231  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2232  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2233  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2234  self.pythonCfgCode += "# End adding early deletion\n"
2235  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2236  self.process = customiseEarlyDelete(self.process)
2237 
2238 
2239  # make the .io file
2240 
2241  if self._options.io:
2242  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2243  if not self._options.io.endswith('.io'): self._option.io+='.io'
2244  io=open(self._options.io,'w')
2245  ioJson={}
2246  if hasattr(self.process.source,"fileNames"):
2247  if len(self.process.source.fileNames.value()):
2248  ioJson['primary']=self.process.source.fileNames.value()
2249  if hasattr(self.process.source,"secondaryFileNames"):
2250  if len(self.process.source.secondaryFileNames.value()):
2251  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2252  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2253  ioJson['pileup']=self._options.pileup_input[4:]
2254  for (o,om) in self.process.outputModules_().items():
2255  ioJson[o]=om.fileName.value()
2256  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2257  if self.productionFilterSequence:
2258  ioJson['filter']=self.productionFilterSequence
2259  import json
2260  io.write(json.dumps(ioJson))
2261  return
2262 
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:71
def prepare_L1REPACK(self, sequence=None)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def doNotInlineEventContent(instance, label="cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)")
event content
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:8
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_DIGIPREMIX(self, sequence=None)
def prepare_RECO(self, sequence="reconstruction")
put the filtering path in the schedule
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def addCustomise(self, unsch=0)
def prepare_DIGIPREMIX_S2(self, sequence=None)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
now for a useful command
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:49
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
Definition: Utilities.py:45
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
inliner
needs to be in reverse order
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def prepare_ALCAHARVEST(self, sequence=None)
def defineMixing(dict)
Definition: Mixing.py:177
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:23
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
double split
Definition: MVATrainer.cc:139
def completeInputCommand(self)
add the corresponding input content
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def prepare_PAT(self, sequence="miniAOD")