
ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
30 defaultOptions.harvesting= 'AtRunEnd'
31 defaultOptions.gflash = False
32 defaultOptions.number = -1
33 defaultOptions.number_out = None
34 defaultOptions.arguments = ""
35 defaultOptions.name = "NO NAME GIVEN"
36 defaultOptions.evt_type = ""
37 defaultOptions.filein = ""
38 defaultOptions.dasquery=""
39 defaultOptions.dasoption=""
40 defaultOptions.secondfilein = ""
41 defaultOptions.customisation_file = []
42 defaultOptions.customisation_file_unsch = []
43 defaultOptions.customise_commands = ""
44 defaultOptions.inline_custom=False
45 defaultOptions.particleTable = 'pythiapdt'
46 defaultOptions.particleTableList = ['pythiapdt','pdt']
47 defaultOptions.dirin = ''
48 defaultOptions.dirout = ''
49 defaultOptions.filetype = 'EDM'
50 defaultOptions.fileout = 'output.root'
51 defaultOptions.filtername = ''
52 defaultOptions.lazy_download = False
53 defaultOptions.custom_conditions = ''
54 defaultOptions.hltProcess = ''
55 defaultOptions.eventcontent = None
56 defaultOptions.datatier = None
57 defaultOptions.inlineEventContent = True
58 defaultOptions.inlineObjets =''
59 defaultOptions.hideGen=False
60 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
61 defaultOptions.beamspot=None
62 defaultOptions.outputDefinition =''
63 defaultOptions.inputCommands = None
64 defaultOptions.outputCommands = None
65 defaultOptions.inputEventContent = ''
66 defaultOptions.dropDescendant = False
67 defaultOptions.relval = None
68 defaultOptions.profile = None
69 defaultOptions.isRepacked = False
70 defaultOptions.restoreRNDSeeds = False
71 defaultOptions.donotDropOnInput = ''
72 defaultOptions.python_filename =''
73 defaultOptions.io=None
74 defaultOptions.lumiToProcess=None
75 defaultOptions.fast=False
76 defaultOptions.runsAndWeightsForMC = None
77 defaultOptions.runsScenarioForMC = None
78 defaultOptions.runUnscheduled = False
79 defaultOptions.timeoutOutput = False
80 defaultOptions.nThreads = '1'
81 
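For orientation, a minimal sketch of how a caller could start from these canonical defaults and override a few fields before handing them to the ConfigBuilder class defined below; the values are purely illustrative, and cmsDriver.py normally fills them (including fields such as scenario) from its command line:

import copy
from Configuration.Applications.ConfigBuilder import defaultOptions

options = copy.deepcopy(defaultOptions)   # start from the canonical defaults
options.scenario = 'pp'                   # added by cmsDriver.py, not part of the defaults above
options.step = 'GEN,SIM'                  # comma-separated list of processing steps
options.conditions = 'auto:run2_mc'       # illustrative GlobalTag specification
options.isMC = True
options.isData = False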
82 # some helper routines
83 def dumpPython(process,name):
84  theObject = getattr(process,name)
85  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
86  return "process."+name+" = " + theObject.dumpPython("process")
87  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
88  return "process."+name+" = " + theObject.dumpPython()+"\n"
89  else:
90  return "process."+name+" = " + theObject.dumpPython()+"\n"
91 def filesFromList(fileName,s=None):
92  import os
93  import FWCore.ParameterSet.Config as cms
94  prim=[]
95  sec=[]
96  for line in open(fileName,'r'):
97  if line.count(".root")>=2:
98  #two files solution...
99  entries=line.replace("\n","").split()
100  if not entries[0] in prim:
101  prim.append(entries[0])
102  if not entries[1] in sec:
103  sec.append(entries[1])
104  elif (line.find(".root")!=-1):
105  entry=line.replace("\n","")
106  if not entry in prim:
107  prim.append(entry)
108  if s:
109  if not hasattr(s,"fileNames"):
110  s.fileNames=cms.untracked.vstring(prim)
111  else:
112  s.fileNames.extend(prim)
113  if len(sec)!=0:
114  if not hasattr(s,"secondaryFileNames"):
115  s.secondaryFileNames=cms.untracked.vstring(sec)
116  else:
117  s.secondaryFileNames.extend(sec)
118  print "found files: ",prim
119  if len(prim)==0:
120  raise Exception("There are no files in input from the file list")
121  if len(sec)!=0:
122  print "found parent files:",sec
123  return (prim,sec)
124 
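A usage sketch (the list file name is hypothetical): each line of the text file carries either a single file name, or a "primary secondary" pair when two .root names appear on the same line:

prim, sec = filesFromList('step1_files.txt')         # just collect the names
filesFromList('step1_files.txt', s=process.source)   # or fill an existing source in place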
125 def filesFromDASQuery(query,option="",s=None):
126  import os,time
127  import FWCore.ParameterSet.Config as cms
128  prim=[]
129  sec=[]
130  print "the query is",query
131  eC=5
132  count=0
133  while eC!=0 and count<3:
134  if count!=0:
135  print 'Sleeping, then retrying DAS'
136  time.sleep(100)
137  p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
138  pipe=p.stdout.read()
139  tupleP = os.waitpid(p.pid, 0)
140  eC=tupleP[1]
141  count=count+1
142  if eC==0:
143  print "DAS succeeded after",count,"attempts",eC
144  else:
145  print "DAS failed 3 times - I give up"
146  for line in pipe.split('\n'):
147  if line.count(".root")>=2:
148  #two files solution...
149  entries=line.replace("\n","").split()
150  if not entries[0] in prim:
151  prim.append(entries[0])
152  if not entries[1] in sec:
153  sec.append(entries[1])
154  elif (line.find(".root")!=-1):
155  entry=line.replace("\n","")
156  if not entry in prim:
157  prim.append(entry)
158  if s:
159  if not hasattr(s,"fileNames"):
160  s.fileNames=cms.untracked.vstring(prim)
161  else:
162  s.fileNames.extend(prim)
163  if len(sec)!=0:
164  if not hasattr(s,"secondaryFileNames"):
165  s.secondaryFileNames=cms.untracked.vstring(sec)
166  else:
167  s.secondaryFileNames.extend(sec)
168  print "found files: ",prim
169  if len(sec)!=0:
170  print "found parent files:",sec
171  return (prim,sec)
172 
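A usage sketch with an illustrative dataset name; the query follows the same 'file dataset = ...' form that addSource builds below, and the option string is passed through to das_client:

prim, sec = filesFromDASQuery('file dataset = /RelValTTbar/CMSSW_X_Y_Z-v1/GEN-SIM',
                              option='--limit 0')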
173 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
174  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
175  for s in aProcess.paths_().keys():
176  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
177  for s in aProcess.endpaths_().keys():
178  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
179 
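A sketch of the intended use: with the default arguments every module in all paths and endpaths is rewired from rawDataCollector to rawDataRepacker, e.g. for repacked data; any other pair of labels can be passed explicitly (the second call is illustrative):

MassReplaceInputTag(process)                                   # rawDataCollector -> rawDataRepacker
MassReplaceInputTag(process, oldT='source', newT='mySource')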
180 def anyOf(listOfKeys,dict,opt=None):
181  for k in listOfKeys:
182  if k in dict:
183  toReturn=dict[k]
184  dict.pop(k)
185  return toReturn
186  if opt!=None:
187  return opt
188  else:
189  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
190 
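A sketch of how anyOf consumes the per-output dictionaries handled in addOutput below; note that the matched key is popped from the dictionary (dictionary contents illustrative):

outDef = {'t': 'AODSIM', 'e': 'AODSIM', 'f': 'myFilter'}
tier = anyOf(['t', 'tier', 'dataTier'], outDef)                            # -> 'AODSIM', 't' removed
content = anyOf(['e', 'ec', 'eventContent', 'streamType'], outDef, tier)   # falls back to tier if absent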
192  """The main building routines """
193 
194  def __init__(self, options, process = None, with_output = False, with_input = False ):
195  """options taken from old cmsDriver and optparse """
196 
197  options.outfile_name = options.dirout+options.fileout
198 
199  self._options = options
200 
201  if self._options.isData and options.isMC:
202  raise Exception("ERROR: You may specify only --data or --mc, not both")
203  #if not self._options.conditions:
204  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
205 
206  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
207  if 'ENDJOB' in self._options.step:
208  if (hasattr(self._options,"outputDefinition") and \
209  self._options.outputDefinition != '' and \
210  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
211  (hasattr(self._options,"datatier") and \
212  self._options.datatier and \
213  'DQMIO' in self._options.datatier):
214  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
215  self._options.step=self._options.step.replace(',ENDJOB','')
216 
217 
218 
219  # what steps are provided by this class?
220  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
221  self.stepMap={}
222  self.stepKeys=[]
223  for step in self._options.step.split(","):
224  if step=='': continue
225  stepParts = step.split(":")
226  stepName = stepParts[0]
227  if stepName not in stepList and not stepName.startswith('re'):
228  raise ValueError("Step "+stepName+" unknown")
229  if len(stepParts)==1:
230  self.stepMap[stepName]=""
231  elif len(stepParts)==2:
232  self.stepMap[stepName]=stepParts[1].split('+')
233  elif len(stepParts)==3:
234  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
235  else:
236  raise ValueError("Step definition "+step+" invalid")
237  self.stepKeys.append(stepName)
238 
239  #print "map of steps is:",self.stepMap
240 
241  self.with_output = with_output
242  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
243  self.with_output = False
244  self.with_input = with_input
245  if process == None:
246  self.process = cms.Process(self._options.name)
247  else:
248  self.process = process
249  self.imports = []
250  self.importsUnsch = []
251  self.define_Configs()
252  self.schedule = list()
253 
254  # we are doing three things here:
255  # creating a process to catch errors
256  # building the code to re-create the process
257 
258  self.additionalCommands = []
259  # TODO: maybe a list of to be dumped objects would help as well
260  self.blacklist_paths = []
261  self.addedObjects = []
262  self.additionalOutputs = {}
263 
264  self.productionFilterSequence = None
265  self.nextScheduleIsConditional=False
266  self.conditionalPaths=[]
267  self.excludedPaths=[]
268 
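A sketch of constructing the builder once the options are filled in (illustrative values, and a full CMSSW environment is assumed); the step string accepts 'STEP' or 'STEP:spec', and a '+'-separated spec is split into a list:

options.step = 'GEN,SIM,DIGI:pdigi_valid'
builder = ConfigBuilder(options, with_output=True, with_input=True)
# builder.stepMap -> {'GEN': '', 'SIM': '', 'DIGI': ['pdigi_valid']}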
269  def profileOptions(self):
270  """
271  addIgProfService
272  Function to add the igprof profile service so that you can dump in the middle
273  of the run.
274  """
275  profileOpts = self._options.profile.split(':')
276  profilerStart = 1
277  profilerInterval = 100
278  profilerFormat = None
279  profilerJobFormat = None
280 
281  if len(profileOpts):
282  #the type, given as the first argument, is unused here
283  profileOpts.pop(0)
284  if len(profileOpts):
285  startEvent = profileOpts.pop(0)
286  if not startEvent.isdigit():
287  raise Exception("%s is not a number" % startEvent)
288  profilerStart = int(startEvent)
289  if len(profileOpts):
290  eventInterval = profileOpts.pop(0)
291  if not eventInterval.isdigit():
292  raise Exception("%s is not a number" % eventInterval)
293  profilerInterval = int(eventInterval)
294  if len(profileOpts):
295  profilerFormat = profileOpts.pop(0)
296 
297 
298  if not profilerFormat:
299  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
300  self._options.step,
301  self._options.pileup,
302  self._options.conditions,
303  self._options.datatier,
304  self._options.profileTypeLabel)
305  if not profilerJobFormat and profilerFormat.endswith(".gz"):
306  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
307  elif not profilerJobFormat:
308  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
309 
310  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
311 
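A sketch of the '--profile <type>:<firstEvent>:<interval>[:<format>]' string this parses; the leading type token is discarded here, and the values below are hypothetical:

builder._options.profile = 'pp:101:50:igprof.%I.gz'
start, interval, evtFmt, jobFmt = builder.profileOptions()
# -> (101, 50, 'igprof.%I.gz', 'igprof.%I_EndOfJob.gz')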
312  def load(self,includeFile):
313  includeFile = includeFile.replace('/','.')
314  self.process.load(includeFile)
315  return sys.modules[includeFile]
316 
317  def loadAndRemember(self, includeFile,unsch=0):
318  """helper routine to load am memorize imports"""
319  # we could make the imports an on-the-fly data method of the process instance itself
320  # not sure if the latter is a good idea
321  includeFile = includeFile.replace('/','.')
322  if unsch==0:
323  self.imports.append(includeFile)
324  self.process.load(includeFile)
325  return sys.modules[includeFile]
326  else:
327  self.importsUnsch.append(includeFile)
328  return 0#sys.modules[includeFile]
329 
330  def executeAndRemember(self, command):
331  """helper routine to remember replace statements"""
332  self.additionalCommands.append(command)
333  if not command.strip().startswith("#"):
334  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
335  import re
336  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
337  #exec(command.replace("process.","self.process."))
338 
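A sketch: the command is executed immediately against self.process (with 'process' rewritten to 'self.process') and is also recorded so that it reappears verbatim in the dumped configuration (the command itself is illustrative):

builder.executeAndRemember('process.maxEvents.input = cms.untracked.int32(10)')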
339  def addCommon(self):
340  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
341  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
342  else:
343  self.process.options = cms.untracked.PSet( )
344 
345  if self._options.runUnscheduled:
346  self.process.options.allowUnscheduled=cms.untracked.bool(True)
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many evts will be processed"""
373  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
374  if self._options.number_out:
375  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
376  self.addedObjects.append(("","maxEvents"))
377 
378  def addSource(self):
379  """Here the source is built. Priority: file, generator"""
380  self.addedObjects.append(("Input source","source"))
381 
382  def filesFromOption(self):
383  for entry in self._options.filein.split(','):
384  print "entry",entry
385  if entry.startswith("filelist:"):
386  filesFromList(entry[9:],self.process.source)
387  elif entry.startswith("dbs:") or entry.startswith("das:"):
388  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
389  else:
390  self.process.source.fileNames.append(self._options.dirin+entry)
391  if self._options.secondfilein:
392  if not hasattr(self.process.source,"secondaryFileNames"):
393  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
394  for entry in self._options.secondfilein.split(','):
395  print "entry",entry
396  if entry.startswith("filelist:"):
397  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
398  elif entry.startswith("dbs:") or entry.startswith("das:"):
399  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
400  else:
401  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
402 
403  if self._options.filein or self._options.dasquery:
404  if self._options.filetype == "EDM":
405  self.process.source=cms.Source("PoolSource",
406  fileNames = cms.untracked.vstring(),
407  secondaryFileNames= cms.untracked.vstring())
408  filesFromOption(self)
409  elif self._options.filetype == "DAT":
410  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411  filesFromOption(self)
412  elif self._options.filetype == "LHE":
413  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
414  if self._options.filein.startswith("lhe:"):
415  #list the article directory automatically
416  args=self._options.filein.split(':')
417  article=args[1]
418  print 'LHE input from article ',article
419  location='/store/lhe/'
420  import os
421  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
422  for line in textOfFiles:
423  for fileName in [x for x in line.split() if '.lhe' in x]:
424  self.process.source.fileNames.append(location+article+'/'+fileName)
425  #check first if list of LHE files is loaded (not empty)
426  if len(line)<2:
427  print 'Issue loading LHE files, please check and try again.'
428  sys.exit(-1)
429  if len(args)>2:
430  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
431  else:
432  filesFromOption(self)
433 
434 
435  elif self._options.filetype == "DQM":
436  self.process.source=cms.Source("DQMRootSource",
437  fileNames = cms.untracked.vstring())
438  filesFromOption(self)
439 
440  elif self._options.filetype == "DQMDAQ":
441  # FIXME: how to configure it if there are no input files specified?
442  self.process.source=cms.Source("DQMStreamerReader")
443 
444 
445  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
446  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
447 
448  if self._options.dasquery!='':
449  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
450  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
451 
452  ##drop LHEXMLStringProduct on input to save memory if appropriate
453  if 'GEN' in self.stepMap.keys():
454  if self._options.inputCommands:
455  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
456  else:
457  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
458 
459  if self.process.source and self._options.inputCommands:
460  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
461  for command in self._options.inputCommands.split(','):
462  # remove whitespace around the keep/drop statements
463  command = command.strip()
464  if command=='': continue
465  self.process.source.inputCommands.append(command)
466  if not self._options.dropDescendant:
467  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
468 
469  if self._options.lumiToProcess:
470  import FWCore.PythonUtilities.LumiList as LumiList
471  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
472 
473  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
474  if self.process.source is None:
475  self.process.source=cms.Source("EmptySource")
476 
477  # modify source in case of run-dependent MC
478  self.runsAndWeights=None
479  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
480  if not self._options.isMC :
481  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
482  if self._options.runsAndWeightsForMC:
483  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
484  else:
485  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
486  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
487  __import__(RunsAndWeights[self._options.runsScenarioForMC])
488  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
489  else:
490  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
491 
492  if self.runsAndWeights:
493  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
494  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
495  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
496  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
497 
498  return
499 
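As a sketch, the --filein forms handled above (dataset, file and article names are illustrative):

options.filein = 'file1.root,file2.root'                         # plain files; dirin is prepended
options.filein = 'filelist:my_files.txt'                         # resolved with filesFromList
options.filein = 'das:/RelValTTbar/CMSSW_X_Y_Z-v1/GEN-SIM-RECO'  # resolved with filesFromDASQuery
options.filein = 'lhe:12345'                                     # LHE article under /store/lhe/ (filetype LHE)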
500  def addOutput(self):
501  """ Add output module to the process """
502  result=""
503  if self._options.outputDefinition:
504  if self._options.datatier:
505  print "--datatier & --eventcontent options ignored"
506 
507  #new output convention with a list of dict
508  outList = eval(self._options.outputDefinition)
509  for (id,outDefDict) in enumerate(outList):
510  outDefDictStr=outDefDict.__str__()
511  if not isinstance(outDefDict,dict):
512  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
513  #requires option: tier
514  theTier=anyOf(['t','tier','dataTier'],outDefDict)
515  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
516  ## event content
517  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
518  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
519  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
520  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
521  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
522  # module label has a particular role
523  if not theModuleLabel:
524  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
525  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
526  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
527  ]
528  for name in tryNames:
529  if not hasattr(self.process,name):
530  theModuleLabel=name
531  break
532  if not theModuleLabel:
533  raise Exception("cannot find a module label for specification: "+outDefDictStr)
534  if id==0:
535  defaultFileName=self._options.outfile_name
536  else:
537  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
538 
539  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
540  if not theFileName.endswith('.root'):
541  theFileName+='.root'
542 
543  if len(outDefDict.keys()):
544  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
545  if theStreamType=='DQMIO': theStreamType='DQM'
546  if theStreamType=='ALL':
547  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
548  else:
549  theEventContent = getattr(self.process, theStreamType+"EventContent")
550 
551 
552  addAlCaSelects=False
553  if theStreamType=='ALCARECO' and not theFilterName:
554  theFilterName='StreamALCACombined'
555  addAlCaSelects=True
556 
557  CppType='PoolOutputModule'
558  if self._options.timeoutOutput:
559  CppType='TimeoutPoolOutputModule'
560  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
561  output = cms.OutputModule(CppType,
562  theEventContent.clone(),
563  fileName = cms.untracked.string(theFileName),
564  dataset = cms.untracked.PSet(
565  dataTier = cms.untracked.string(theTier),
566  filterName = cms.untracked.string(theFilterName))
567  )
568  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
569  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
570  if not theSelectEvent and hasattr(self.process,'filtering_step'):
571  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
572  if theSelectEvent:
573  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
574 
575  if addAlCaSelects:
576  if not hasattr(output,'SelectEvents'):
577  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
578  for alca in self.AlCaPaths:
579  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
580 
581 
582  if hasattr(self.process,theModuleLabel):
583  raise Exception("the current process already has a module "+theModuleLabel+" defined")
584  #print "creating output module ",theModuleLabel
585  setattr(self.process,theModuleLabel,output)
586  outputModule=getattr(self.process,theModuleLabel)
587  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
588  path=getattr(self.process,theModuleLabel+'_step')
589  self.schedule.append(path)
590 
591  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
592  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
593  return label
594  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
595  if theExtraOutputCommands:
596  if not isinstance(theExtraOutputCommands,list):
597  raise Exception("extra output command in --output must be a list of strings")
598  if hasattr(self.process,theStreamType+"EventContent"):
599  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
600  else:
601  outputModule.outputCommands.extend(theExtraOutputCommands)
602 
603  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
604 
605  ##ends the --output options model
606  return result
607 
608  streamTypes=self._options.eventcontent.split(',')
609  tiers=self._options.datatier.split(',')
610  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
611  raise Exception("number of event content arguments does not match number of datatier arguments")
612 
613  # if the only step is alca we don't need to put in an output
614  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
615  return "\n"
616 
617  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
618  if streamType=='': continue
619  if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
620  if streamType=='DQMIO': streamType='DQM'
621  theEventContent = getattr(self.process, streamType+"EventContent")
622  if i==0:
623  theFileName=self._options.outfile_name
624  theFilterName=self._options.filtername
625  else:
626  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
627  theFilterName=self._options.filtername
628  CppType='PoolOutputModule'
629  if self._options.timeoutOutput:
630  CppType='TimeoutPoolOutputModule'
631  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
632  output = cms.OutputModule(CppType,
633  theEventContent,
634  fileName = cms.untracked.string(theFileName),
635  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
636  filterName = cms.untracked.string(theFilterName)
637  )
638  )
639  if hasattr(self.process,"generation_step") and streamType!='LHE':
640  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
641  if hasattr(self.process,"filtering_step"):
642  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
643 
644  if streamType=='ALCARECO':
645  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
646 
647  if "MINIAOD" in streamType:
648  output.dropMetaData = cms.untracked.string('ALL')
649  output.fastCloning= cms.untracked.bool(False)
650  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
651 
652  outputModuleName=streamType+'output'
653  setattr(self.process,outputModuleName,output)
654  outputModule=getattr(self.process,outputModuleName)
655  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
656  path=getattr(self.process,outputModuleName+'_step')
657  self.schedule.append(path)
658 
659  if self._options.outputCommands and streamType!='DQM':
660  for evct in self._options.outputCommands.split(','):
661  if not evct: continue
662  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
663 
664  if not self._options.inlineEventContent:
665  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
666  return label
667  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
668 
669  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
670 
671  return result
672 
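A sketch of the list-of-dicts --output syntax consumed by the first branch above; 'tier' (or 't'/'dataTier') is mandatory, the other keys are optional and may use the short or long spellings shown in the anyOf calls (values illustrative):

options.outputDefinition = str([
    {'tier': 'AODSIM', 'eventContent': 'AODSIM', 'filterName': ''},
    {'t': 'DQMIO', 'e': 'DQM'},
])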
674  """
675  Add selected standard sequences to the process
676  """
677  # load the pile up file
678  if self._options.pileup:
679  pileupSpec=self._options.pileup.split(',')[0]
680 
681  # Does the requested pile-up scenario exist?
682  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
683  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
684  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
685  raise Exception(message)
686 
687  # Put mixing parameters in a dictionary
688  if '.' in pileupSpec:
689  mixingDict={'file':pileupSpec}
690  elif pileupSpec.startswith('file:'):
691  mixingDict={'file':pileupSpec[5:]}
692  else:
693  import copy
694  mixingDict=copy.copy(Mixing[pileupSpec])
695  if len(self._options.pileup.split(','))>1:
696  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
697 
698  # Load the pu cfg file corresponding to the requested pu scenario
699  if 'file:' in pileupSpec:
700  #the file is local
701  self.process.load(mixingDict['file'])
702  print "inlining mixing module configuration"
703  self._options.inlineObjets+=',mix'
704  else:
705  self.loadAndRemember(mixingDict['file'])
706 
707  mixingDict.pop('file')
708  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
709  if self._options.pileup_input:
710  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
711  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
712  elif self._options.pileup_input.startswith("filelist:"):
713  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
714  else:
715  mixingDict['F']=self._options.pileup_input.split(',')
716  specialization=defineMixing(mixingDict)
717  for command in specialization:
718  self.executeAndRemember(command)
719  if len(mixingDict)!=0:
720  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
721 
722 
723  # load the geometry file
724  try:
725  if len(self.stepMap):
726  self.loadAndRemember(self.GeometryCFF)
727  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
728  self.loadAndRemember(self.SimGeometryCFF)
729  if self.geometryDBLabel:
730  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
731  except ImportError:
732  print "Geometry option",self._options.geometry,"unknown."
733  raise
734 
735  if len(self.stepMap):
736  self.loadAndRemember(self.magFieldCFF)
737 
738  for stepName in self.stepKeys:
739  stepSpec = self.stepMap[stepName]
740  print "Step:", stepName,"Spec:",stepSpec
741  if stepName.startswith('re'):
742  ##add the corresponding input content
743  if stepName[2:] not in self._options.donotDropOnInput:
744  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
745  stepName=stepName[2:]
746  if stepSpec=="":
747  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
748  elif type(stepSpec)==list:
749  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
750  elif type(stepSpec)==tuple:
751  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
752  else:
753  raise ValueError("Invalid step definition")
754 
755  if self._options.restoreRNDSeeds!=False:
756  #it is either True, or a process name
757  if self._options.restoreRNDSeeds==True:
758  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
759  else:
760  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
761  if self._options.inputEventContent or self._options.inputCommands:
762  if self._options.inputCommands:
763  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
764  else:
765  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
766 
767 
769  if self._options.inputEventContent:
770  import copy
771  def dropSecondDropStar(iec):
772  #drop duplicate occurrences of 'drop *' from the list
773  count=0
774  for item in iec:
775  if item=='drop *':
776  if count!=0:
777  iec.remove(item)
778  count+=1
779 
780 
781  ## allow comma separated input eventcontent
782  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
783  for evct in self._options.inputEventContent.split(','):
784  if evct=='': continue
785  theEventContent = getattr(self.process, evct+"EventContent")
786  if hasattr(theEventContent,'outputCommands'):
787  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
788  if hasattr(theEventContent,'inputCommands'):
789  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
790 
791  dropSecondDropStar(self.process.source.inputCommands)
792 
793  if not self._options.dropDescendant:
794  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
795 
796 
797  return
798 
799  def addConditions(self):
800  """Add conditions to the process"""
801  if not self._options.conditions: return
802 
803  if 'FrontierConditions_GlobalTag' in self._options.conditions:
804  print 'using FrontierConditions_GlobalTag in --conditions is no longer necessary and will be deprecated soon; please update your command line'
805  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
806 
807  self.loadAndRemember(self.ConditionsDefaultCFF)
808  from Configuration.AlCa.GlobalTag import GlobalTag
809  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
810  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
811  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
812 
813 
814  def addCustomise(self,unsch=0):
815  """Include the customise code """
816 
817  custOpt=[]
818  if unsch==0:
819  for c in self._options.customisation_file:
820  custOpt.extend(c.split(","))
821  else:
822  for c in self._options.customisation_file_unsch:
823  custOpt.extend(c.split(","))
824 
825  custMap=DictTypes.SortedKeysDict()
826  for opt in custOpt:
827  if opt=='': continue
828  if opt.count('.')>1:
829  raise Exception("more than one . in the specification: "+opt)
830  fileName=opt.split('.')[0]
831  if opt.count('.')==0: rest='customise'
832  else:
833  rest=opt.split('.')[1]
834  if rest=='py': rest='customise' #catch the case of --customise file.py
835 
836  if fileName in custMap:
837  custMap[fileName].extend(rest.split('+'))
838  else:
839  custMap[fileName]=rest.split('+')
840 
841  if len(custMap)==0:
842  final_snippet='\n'
843  else:
844  final_snippet='\n# customisation of the process.\n'
845 
846  allFcn=[]
847  for opt in custMap:
848  allFcn.extend(custMap[opt])
849  for fcn in allFcn:
850  if allFcn.count(fcn)!=1:
851  raise Exception("cannot specify twice "+fcn+" as a customisation method")
852 
853  for f in custMap:
854  # let python search for that package and do syntax checking at the same time
855  packageName = f.replace(".py","").replace("/",".")
856  __import__(packageName)
857  package = sys.modules[packageName]
858 
859  # now ask the package for its definition and pick .py instead of .pyc
860  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
861 
862  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
863  if self._options.inline_custom:
864  for line in file(customiseFile,'r'):
865  if "import FWCore.ParameterSet.Config" in line:
866  continue
867  final_snippet += line
868  else:
869  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
870  for fcn in custMap[f]:
871  print "customising the process with",fcn,"from",f
872  if not hasattr(package,fcn):
873  #bound to fail at run time
874  raise Exception("config "+f+" has no function "+fcn)
875  #execute the command
876  self.process=getattr(package,fcn)(self.process)
877  #and print it in the configuration
878  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
879  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
880 
881  if len(custMap)!=0:
882  final_snippet += '\n# End of customisation functions\n'
883 
884  ### now for a useful command
885  return final_snippet
886 
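A sketch of the --customise specification parsed above: each entry is '<file or package>.<function>[+<function>...]', and a bare 'file.py' falls back to a function named customise (all names here are hypothetical):

options.customisation_file = ['MyAnalysis/MyTools/customs.customiseForStudy+tightenCuts',
                              'localCustoms.py']   # -> 'customise' from localCustoms.py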
887  def addCustomiseCmdLine(self):
888  final_snippet='\n# Customisation from command line\n'
889  if self._options.customise_commands:
890  import string
891  for com in self._options.customise_commands.split('\\n'):
892  com=string.lstrip(com)
893  self.executeAndRemember(com)
894  final_snippet +='\n'+com
895 
896  return final_snippet
897 
898  #----------------------------------------------------------------------------
899  # here the methods to define the python includes for each step or
900  # conditions
901  #----------------------------------------------------------------------------
902  def define_Configs(self):
903  if len(self.stepMap):
904  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
905  if self._options.particleTable not in defaultOptions.particleTableList:
906  print 'Invalid particle table provided. Options are:'
907  print defaultOptions.particleTableList
908  sys.exit(-1)
909  else:
910  if len(self.stepMap):
911  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
912 
913  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
914 
915  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
916  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
917  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
918  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
919  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
920  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
921  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
922  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
923  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
924  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
925  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
926  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
927  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
928  self.EIDefaultCFF=None
929  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
930  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
931  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
932  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
933  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
934  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
935  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
936  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
937  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
938  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
939  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
940 
941  if "DATAMIX" in self.stepMap.keys():
942  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
943  if self._options.datamix == 'PreMix':
944  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
945  else:
946  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
947  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
948  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
949 
950  if "DIGIPREMIX" in self.stepMap.keys():
951  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
952 
953  self.ALCADefaultSeq=None
954  self.LHEDefaultSeq='externalLHEProducer'
955  self.GENDefaultSeq='pgen'
956  self.SIMDefaultSeq='psim'
957  self.DIGIDefaultSeq='pdigi'
958  self.DIGIPREMIXDefaultSeq='pdigi'
959  self.DIGIPREMIX_S2DefaultSeq='pdigi'
960  self.DATAMIXDefaultSeq=None
961  self.DIGI2RAWDefaultSeq='DigiToRaw'
962  self.HLTDefaultSeq='GRun'
963  self.L1DefaultSeq=None
964  self.L1REPACKDefaultSeq='GT'
965  self.HARVESTINGDefaultSeq=None
966  self.ALCAHARVESTDefaultSeq=None
967  self.CFWRITERDefaultSeq=None
968  self.RAW2DIGIDefaultSeq='RawToDigi'
969  self.L1RecoDefaultSeq='L1Reco'
970  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
971  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
972  self.RECODefaultSeq='reconstruction'
973  else:
974  self.RECODefaultSeq='reconstruction_fromRECO'
975 
976  self.EIDefaultSeq='top'
977  self.POSTRECODefaultSeq=None
978  self.L1HwValDefaultSeq='L1HwVal'
979  self.DQMDefaultSeq='DQMOffline'
980  self.VALIDATIONDefaultSeq=''
981  self.ENDJOBDefaultSeq='endOfProcess'
982  self.REPACKDefaultSeq='DigiToRawRepack'
983  self.PATDefaultSeq='miniAOD'
984 
985  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
986 
987  if not self._options.beamspot:
988  self._options.beamspot=VtxSmearedDefaultKey
989 
990  # if its MC then change the raw2digi
991  if self._options.isMC==True:
992  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
993  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
994  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
995  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
996  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
997  else:
998  self._options.beamspot = None
999 
1000  #patch for gen, due to backward incompatibility
1001  if 'reGEN' in self.stepMap:
1002  self.GENDefaultSeq='fixGenInfo'
1003 
1004  if self._options.scenario=='cosmics':
1005  self._options.pileup='Cosmics'
1006  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1007  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1008  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1009  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1010  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1011  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1012  if self._options.isMC==True:
1013  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1014  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1015  self.RECODefaultSeq='reconstructionCosmics'
1016  self.DQMDefaultSeq='DQMOfflineCosmics'
1017 
1018  if self._options.scenario=='HeavyIons':
1019  if not self._options.beamspot:
1020  self._options.beamspot=VtxSmearedHIDefaultKey
1021  self.HLTDefaultSeq = 'HIon'
1022  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1023  self.VALIDATIONDefaultSeq=''
1024  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1025  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1026  self.RECODefaultSeq='reconstructionHeavyIons'
1027  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1028  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1029  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1030  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1031  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1032  if self._options.isMC==True:
1033  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1034 
1035 
1036  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1037 
1038  self.USERDefaultSeq='user'
1039  self.USERDefaultCFF=None
1040 
1041  # the magnetic field
1042  if self._options.isData:
1043  if self._options.magField==defaultOptions.magField:
1044  print "magnetic field option forced to: AutoFromDBCurrent"
1045  self._options.magField='AutoFromDBCurrent'
1046  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1047  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1048 
1049  # the geometry
1050  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1051  self.geometryDBLabel=None
1052  simGeometry=''
1053  if self._options.fast:
1054  if 'start' in self._options.conditions.lower():
1055  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1056  else:
1057  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1058  else:
1059  def inGeometryKeys(opt):
1060  from Configuration.StandardSequences.GeometryConf import GeometryConf
1061  if opt in GeometryConf:
1062  return GeometryConf[opt]
1063  else:
1064  return opt
1065 
1066  geoms=self._options.geometry.split(',')
1067  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1068  if len(geoms)==2:
1069  #may specify the reco geometry
1070  if '/' in geoms[1] or '_cff' in geoms[1]:
1071  self.GeometryCFF=geoms[1]
1072  else:
1073  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1074 
1075  if (geoms[0].startswith('DB:')):
1076  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1077  self.geometryDBLabel=geoms[0][3:]
1078  print "with DB:"
1079  else:
1080  if '/' in geoms[0] or '_cff' in geoms[0]:
1081  self.SimGeometryCFF=geoms[0]
1082  else:
1083  simGeometry=geoms[0]
1084  if self._options.gflash==True:
1085  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1086  else:
1087  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1088 
1089  # synchronize the geometry configuration and the FullSimulation sequence to be used
1090  if simGeometry not in defaultOptions.geometryExtendedOptions:
1091  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1092 
1093  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1094  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1095  self._options.beamspot='NoSmear'
1096 
1097  # fastsim requires some changes to the default cff files and sequences
1098  if self._options.fast:
1099  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1100  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1101  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1102  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1103  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1104 
1105  # Mixing
1106  if self._options.pileup=='default':
1107  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1108  self._options.pileup=MixingDefaultKey
1109 
1110 
1111  #not driven by a default cff anymore
1112  if self._options.isData:
1113  self._options.pileup=None
1114 
1115 
1116  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1117 
1118  # for alca, skims, etc
1119  def addExtraStream(self,name,stream,workflow='full'):
1120  # define output module and go from there
1121  output = cms.OutputModule("PoolOutputModule")
1122  if stream.selectEvents.parameters_().__len__()!=0:
1123  output.SelectEvents = stream.selectEvents
1124  else:
1125  output.SelectEvents = cms.untracked.PSet()
1126  output.SelectEvents.SelectEvents=cms.vstring()
1127  if isinstance(stream.paths,tuple):
1128  for path in stream.paths:
1129  output.SelectEvents.SelectEvents.append(path.label())
1130  else:
1131  output.SelectEvents.SelectEvents.append(stream.paths.label())
1132 
1133 
1134 
1135  if isinstance(stream.content,str):
1136  evtPset=getattr(self.process,stream.content)
1137  for p in evtPset.parameters_():
1138  setattr(output,p,getattr(evtPset,p))
1139  if not self._options.inlineEventContent:
1140  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1141  return label
1142  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1143  else:
1144  output.outputCommands = stream.content
1145 
1146 
1147  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1148 
1149  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1150  filterName = cms.untracked.string(stream.name))
1151 
1152  if self._options.filtername:
1153  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1154 
1155  #add an automatic flushing to limit memory consumption
1156  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1157 
1158  if workflow in ("producers","full"):
1159  if isinstance(stream.paths,tuple):
1160  for path in stream.paths:
1161  self.schedule.append(path)
1162  else:
1163  self.schedule.append(stream.paths)
1164 
1165 
1166  # in case of relvals we don't want to have additional outputs
1167  if (not self._options.relval) and workflow in ("full","output"):
1168  self.additionalOutputs[name] = output
1169  setattr(self.process,name,output)
1170 
1171  if workflow == 'output':
1172  # adjust the select events to the proper trigger results from previous process
1173  filterList = output.SelectEvents.SelectEvents
1174  for i, filter in enumerate(filterList):
1175  filterList[i] = filter+":"+self._options.triggerResultsProcess
1176 
1177  return output
1178 
1179  #----------------------------------------------------------------------------
1180  # here the methods to create the steps. Of course we are doing magic here ;)
1181  # prepare_STEPNAME modifies self.process and what else's needed.
1182  #----------------------------------------------------------------------------
1183 
1184  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1185  if ( len(sequence.split('.'))==1 ):
1186  l=self.loadAndRemember(defaultCFF,unsch)
1187  elif ( len(sequence.split('.'))==2 ):
1188  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1189  sequence=sequence.split('.')[1]
1190  else:
1191  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1192  print sequence,"not recognized"
1193  raise
1194  return l
1195 
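A sketch of the two accepted sequence forms: a bare sequence name keeps the step's default cff, while 'Pkg/SubPkg/my_cff.seqA+seqB' loads that cff instead (the custom package name is hypothetical):

builder.loadDefaultOrSpecifiedCFF('psim', builder.SIMDefaultCFF)
builder.loadDefaultOrSpecifiedCFF('MyPkg/MySub/mySim_cff.mySimSequence', builder.SIMDefaultCFF)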
1196  def scheduleSequence(self,seq,prefix,what='Path'):
1197  if '*' in seq:
1198  #create only one path with all sequences in it
1199  for i,s in enumerate(seq.split('*')):
1200  if i==0:
1201  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1202  else:
1203  p=getattr(self.process,prefix)
1204  p+=getattr(self.process, s)
1205  self.schedule.append(getattr(self.process,prefix))
1206  return
1207  else:
1208  #create as many path as many sequences
1209  if not '+' in seq:
1210  if self.nextScheduleIsConditional:
1211  self.conditionalPaths.append(prefix)
1212  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1213  self.schedule.append(getattr(self.process,prefix))
1214  else:
1215  for i,s in enumerate(seq.split('+')):
1216  sn=prefix+'%d'%(i)
1217  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1218  self.schedule.append(getattr(self.process,sn))
1219  return
1220 
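A sketch of the two scheduling modes: '*' concatenates the sequences into a single path named after the prefix, while '+' creates one numbered path per sequence (sequence names illustrative):

builder.scheduleSequence('pdigi*DigiToRaw', 'digitisation_step')   # one Path: digitisation_step
builder.scheduleSequence('seqA+seqB', 'user_step')                 # two Paths: user_step0, user_step1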
1221  def scheduleSequenceAtEnd(self,seq,prefix):
1222  self.scheduleSequence(seq,prefix,what='EndPath')
1223  return
1224 
1225  def prepare_ALCAPRODUCER(self, sequence = None):
1226  self.prepare_ALCA(sequence, workflow = "producers")
1227 
1228  def prepare_ALCAOUTPUT(self, sequence = None):
1229  self.prepare_ALCA(sequence, workflow = "output")
1230 
1231  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1232  """ Enrich the process with alca streams """
1233  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1234  sequence = sequence.split('.')[-1]
1235 
1236  # decide which ALCA paths to use
1237  alcaList = sequence.split("+")
1238  maxLevel=0
1239  from Configuration.AlCa.autoAlca import autoAlca
1240  # support @X from autoAlca.py, and recursion support: i.e. T0:@Mu+@EG+...
1241  self.expandMapping(alcaList,autoAlca)
1242  self.AlCaPaths=[]
1243  for name in alcaConfig.__dict__:
1244  alcastream = getattr(alcaConfig,name)
1245  shortName = name.replace('ALCARECOStream','')
1246  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1247  output = self.addExtraStream(name,alcastream, workflow = workflow)
1248  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1249  self.AlCaPaths.append(shortName)
1250  if 'DQM' in alcaList:
1251  if not self._options.inlineEventContent and hasattr(self.process,name):
1252  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1253  else:
1254  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1255 
1256  #rename the HLT process name in the alca modules
1257  if self._options.hltProcess or 'HLT' in self.stepMap:
1258  if isinstance(alcastream.paths,tuple):
1259  for path in alcastream.paths:
1260  self.renameHLTprocessInSequence(path.label())
1261  else:
1262  self.renameHLTprocessInSequence(alcastream.paths.label())
1263 
1264  for i in range(alcaList.count(shortName)):
1265  alcaList.remove(shortName)
1266 
1267  # DQM needs a special handling
1268  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1269  path = getattr(alcaConfig,name)
1270  self.schedule.append(path)
1271  alcaList.remove('DQM')
1272 
1273  if isinstance(alcastream,cms.Path):
1274  #blacklist the alca path so that it does not appear in the cfg
1275  self.blacklist_paths.append(alcastream)
1276 
1277 
1278  if len(alcaList) != 0:
1279  available=[]
1280  for name in alcaConfig.__dict__:
1281  alcastream = getattr(alcaConfig,name)
1282  if isinstance(alcastream,cms.FilteredStream):
1283  available.append(name.replace('ALCARECOStream',''))
1284  print "The following alcas could not be found "+str(alcaList)
1285  print "available ",available
1286  #print "verify your configuration, ignoring for now"
1287  raise Exception("The following alcas could not be found "+str(alcaList))
1288 
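As a sketch, the ALCA step is driven from the step string; '@' aliases are expanded through autoAlca by expandMapping, and the stream names here are illustrative:

options.step = 'RAW2DIGI,RECO,ALCA:TkAlMinBias+SiStripCalZeroBias'
options.step = 'RAW2DIGI,RECO,ALCA:@Mu'   # alias resolved via autoAlca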
1289  def prepare_LHE(self, sequence = None):
1290  #load the fragment
1291  ##make it loadable
1292  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1293  print "Loading lhe fragment from",loadFragment
1294  __import__(loadFragment)
1295  self.process.load(loadFragment)
1296  ##inline the modules
1297  self._options.inlineObjets+=','+sequence
1298 
1299  getattr(self.process,sequence).nEvents = int(self._options.number)
1300 
1301  #schedule it
1302  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1303  self.excludedPaths.append("lhe_step")
1304  self.schedule.append( self.process.lhe_step )
1305 
1306  def prepare_GEN(self, sequence = None):
1307  """ load the fragment of generator configuration """
1308  loadFailure=False
1309  #remove trailing .py
1310  #support old style .cfi by changing into something.cfi into something_cfi
1311  #remove python/ from the name
1312  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1313  #standard location of fragments
1314  if not '/' in loadFragment:
1315  loadFragment='Configuration.Generator.'+loadFragment
1316  else:
1317  loadFragment=loadFragment.replace('/','.')
1318  try:
1319  print "Loading generator fragment from",loadFragment
1320  __import__(loadFragment)
1321  except:
1322  loadFailure=True
1323  #if self.process.source and self.process.source.type_()=='EmptySource':
1324  if not (self._options.filein or self._options.dasquery):
1325  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1326 
1327  if not loadFailure:
1328  generatorModule=sys.modules[loadFragment]
1329  genModules=generatorModule.__dict__
1330  #remove lhe producer module since this should have been
1331  #imported instead in the LHE step
1332  if self.LHEDefaultSeq in genModules:
1333  del genModules[self.LHEDefaultSeq]
1334 
1335  if self._options.hideGen:
1336  self.loadAndRemember(loadFragment)
1337  else:
1338  self.process.load(loadFragment)
1339  # expose the objects from that fragment to the configuration
1340  import FWCore.ParameterSet.Modules as cmstypes
1341  for name in genModules:
1342  theObject = getattr(generatorModule,name)
1343  if isinstance(theObject, cmstypes._Module):
1344  self._options.inlineObjets=name+','+self._options.inlineObjets
1345  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1346  self._options.inlineObjets+=','+name
1347 
1348  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1349  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1350  self.productionFilterSequence = 'ProductionFilterSequence'
1351  elif 'generator' in genModules:
1352  self.productionFilterSequence = 'generator'
1353 
1354  """ Enrich the schedule with the rest of the generation step """
1355  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1356  genSeqName=sequence.split('.')[-1]
1357 
1358  if True:
1359  try:
1360  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1361  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1362  self.loadAndRemember(cffToBeLoaded)
1363  except ImportError:
1364  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1365 
1366  if self._options.scenario == 'HeavyIons':
1367  if self._options.pileup=='HiMixGEN':
1368  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1369  else:
1370  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1371 
1372  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1373  self.schedule.append(self.process.generation_step)
1374 
1375  #register to the genstepfilter the name of the path (static right now, but might evolve)
1376  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1377 
1378  if 'reGEN' in self.stepMap:
1379  #stop here
1380  return
1381 
1382  """ Enrich the schedule with the summary of the filter step """
1383  #the gen filter in the endpath
1384  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1385  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1386  return
1387 
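# Illustrative sketch (not part of ConfigBuilder): how prepare_GEN above
# normalises the --evt_type argument into an importable fragment name.
# The fragment names used in the checks are hypothetical.
def _normaliseFragmentName(evt_type):
    # drop the trailing .py, turn old-style something.cfi into something_cfi,
    # strip a leading python/ and map any remaining '/' to '.'
    name = evt_type.replace('.py', '').replace('.', '_').replace('python/', '')
    if '/' not in name:
        name = 'Configuration.Generator.' + name
    else:
        name = name.replace('/', '.')
    return name

assert _normaliseFragmentName('MyFragment_cfi.py') == 'Configuration.Generator.MyFragment_cfi'
assert _normaliseFragmentName('MyPkg/MySub/python/MyFragment_cfi.py') == 'MyPkg.MySub.MyFragment_cfi'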
1388  def prepare_SIM(self, sequence = None):
1389  """ Enrich the schedule with the simulation step"""
1390  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1391  if not self._options.fast:
1392  if self._options.gflash==True:
1393  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1394 
1395  if self._options.magField=='0T':
1396  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1397  else:
1398  if self._options.magField=='0T':
1399  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1400 
1401  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1402  return
1403 
1404  def prepare_DIGI(self, sequence = None):
1405  """ Enrich the schedule with the digitisation step"""
1406  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1407 
1408  if self._options.gflash==True:
1409  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1410 
1411  if sequence == 'pdigi_valid':
1412  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1413 
1414  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1415  if self._options.inputEventContent=='':
1416  self._options.inputEventContent='REGEN'
1417  else:
1418  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1419 
1420 
1421  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1422  return
1423 
1424  def prepare_DIGIPREMIX(self, sequence = None):
1425  """ Enrich the schedule with the digitisation step"""
1426  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1427 
1428  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1429 
1430  if sequence == 'pdigi_valid':
1431  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1432  else:
1433  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1434 
1435  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1436  return
1437 
1438  def prepare_DIGIPREMIX_S2(self, sequence = None):
1439  """ Enrich the schedule with the digitisation step"""
1440  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1441 
1442  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1443 
1444 
1445  if sequence == 'pdigi_valid':
1446  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1447  else:
1448  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1449 
1450  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1451  return
1452 
1453  def prepare_CFWRITER(self, sequence = None):
1454  """ Enrich the schedule with the crossing frame writer step"""
1455  self.loadAndRemember(self.CFWRITERDefaultCFF)
1456  self.scheduleSequence('pcfw','cfwriter_step')
1457  return
1458 
1459  def prepare_DATAMIX(self, sequence = None):
1460  """ Enrich the schedule with the digitisation step"""
1461  self.loadAndRemember(self.DATAMIXDefaultCFF)
1462  self.scheduleSequence('pdatamix','datamixing_step')
1463 
1464  if self._options.pileup_input:
1465  theFiles=''
1466  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1467  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1468  elif self._options.pileup_input.startswith("filelist:"):
1469  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1470  else:
1471  theFiles=self._options.pileup_input.split(',')
1472  #print theFiles
1473  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1474 
1475  return
1476 
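# Illustrative sketch (hypothetical dataset and file names): the three forms of
# --pileup_input handled by prepare_DATAMIX above, reduced to a helper that
# omits the DAS option argument. filesFromDASQuery and filesFromList are the
# helpers defined earlier in this file.
def _pileupFileList(pileup_input):
    if pileup_input.startswith('dbs:') or pileup_input.startswith('das:'):
        # e.g. 'das:/RelValMinBias/SomeCampaign/GEN-SIM' -> resolved via a DAS file query
        return filesFromDASQuery('file dataset = %s' % pileup_input[4:])[0]
    elif pileup_input.startswith('filelist:'):
        # e.g. 'filelist:my_pileup_files.txt' -> plain text list of files
        return filesFromList(pileup_input[9:])[0]
    # e.g. 'file:a.root,file:b.root' -> explicit comma separated list
    return pileup_input.split(',')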
1477  def prepare_DIGI2RAW(self, sequence = None):
1478  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1479  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1480  if "DIGIPREMIX" in self.stepMap.keys():
1481  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1482  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1483 
1484  return
1485 
1486  def prepare_REPACK(self, sequence = None):
1487  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1488  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1489  return
1490 
1491  def prepare_L1(self, sequence = None):
1492  """ Enrich the schedule with the L1 simulation step"""
1493  assert(sequence == None)
1494  self.loadAndRemember(self.L1EMDefaultCFF)
1495  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1496  return
1497 
1498  def prepare_L1REPACK(self, sequence = None):
1499  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1500  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1501  if sequence in supported:
1502  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1503  if self._options.scenario == 'HeavyIons':
1504  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1505  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1506  else:
1507  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1508  raise Exception('unsupported feature')
1509 
1510 
1511  def prepare_HLT(self, sequence = None):
1512  """ Enrich the schedule with the HLT simulation step"""
1513  if not sequence:
1514  print "no specification of the hlt menu has been given, should never happen"
1515  raise Exception('no HLT sequence provided')
1516 
1517  if '@' in sequence:
1518  # case where HLT:@something was provided
1519  from Configuration.HLT.autoHLT import autoHLT
1520  key = sequence[1:]
1521  if key in autoHLT:
1522  sequence = autoHLT[key]
1523  else:
1524  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1525 
1526  if ',' in sequence:
1527  #case where HLT:something:something was provided
1528  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1529  optionsForHLT = {}
1530  if self._options.scenario == 'HeavyIons':
1531  optionsForHLT['type'] = 'HIon'
1532  else:
1533  optionsForHLT['type'] = 'GRun'
1534  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1535  if sequence == 'run,fromSource':
1536  if hasattr(self.process.source,'firstRun'):
1537  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1538  elif hasattr(self.process.source,'setRunNumber'):
1539  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1540  else:
1541  raise Exception('Cannot replace menu to load %s'%(sequence))
1542  else:
1543  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1544  else:
1545  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1546 
1547  if self._options.isMC:
1548  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1549 
1550  if self._options.name != 'HLT':
1551  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1552  self.additionalCommands.append('process = ProcessName(process)')
1553  self.additionalCommands.append('')
1554  from HLTrigger.Configuration.CustomConfigs import ProcessName
1555  self.process = ProcessName(self.process)
1556 
1557  self.schedule.append(self.process.HLTSchedule)
1558  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1559 
1560  #this is a fake, to be removed with fastsim migration and HLT menu dump
1561  if self._options.fast:
1562  if not hasattr(self.process,'HLTEndSequence'):
1563  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1564 
1565 
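# Illustrative note on the sequence strings accepted by prepare_HLT above
# (the '@' key shown here is only a placeholder, the real keys live in
# Configuration.HLT.autoHLT):
#   'HLT:@someKey'        -> resolved through the autoHLT mapping first
#   'HLT:GRun'            -> loads 'HLTrigger/Configuration/HLT_GRun_cff'
#   'HLT:run,fromSource'  -> reloads the menu of the run being processed
def _hltCffFor(menu):
    # mirrors the plain-menu branch above
    return 'HLTrigger/Configuration/HLT_%s_cff' % menu

assert _hltCffFor('GRun') == 'HLTrigger/Configuration/HLT_GRun_cff'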
1566  def prepare_RAW2RECO(self, sequence = None):
1567  if ',' in sequence:
1568  seqReco=sequence.split(',')[1]
1569  seqDigi=sequence.split(',')[0]
1570  else:
1571  raise Exception("RAW2RECO requires two ',' separated specifications, got: "+repr(sequence))
1572 
1573  self.prepare_RAW2DIGI(seqDigi)
1574  self.prepare_RECO(seqReco)
1575  return
1576 
1577  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1578  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1579  self.scheduleSequence(sequence,'raw2digi_step')
1580  # if self._options.isRepacked:
1581  #self.renameInputTagsInSequence(sequence)
1582  return
1583 
1584  def prepare_PATFILTER(self, sequence=None):
1585  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1586  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1587  for filt in allMetFilterPaths:
1588  self.schedule.append(getattr(self.process,'Flag_'+filt))
1589 
1590  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1591  ''' Enrich the schedule with L1 HW validation '''
1592  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1593  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1594  print '\n\n\n DEPRECATED this has no action \n\n\n'
1595  return
1596 
1597  def prepare_L1Reco(self, sequence = "L1Reco"):
1598  ''' Enrich the schedule with L1 reconstruction '''
1599  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1600  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1601  return
1602 
1603  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1604  ''' Enrich the schedule with the L1 track trigger '''
1605  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1606  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1607  return
1608 
1609  def prepare_FILTER(self, sequence = None):
1610  ''' Enrich the schedule with a user defined filter sequence '''
1611  ## load the relevant part
1612  filterConfig=self.load(sequence.split('.')[0])
1613  filterSeq=sequence.split('.')[-1]
1614  ## print it in the configuration
1615  class PrintAllModules(object):
1616  def __init__(self):
1617  self.inliner=''
1618  pass
1619  def enter(self,visitee):
1620  try:
1621  label=visitee.label()
1622  ##needs to be in reverse order
1623  self.inliner=label+','+self.inliner
1624  except:
1625  pass
1626  def leave(self,v): pass
1627 
1628  expander=PrintAllModules()
1629  getattr(self.process,filterSeq).visit( expander )
1630  self._options.inlineObjets+=','+expander.inliner
1631  self._options.inlineObjets+=','+filterSeq
1632 
1633  ## put the filtering path in the schedule
1634  self.scheduleSequence(filterSeq,'filtering_step')
1635  self.nextScheduleIsConditional=True
1636  ## put it before all the other paths
1637  self.productionFilterSequence = filterSeq
1638 
1639  return
1640 
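# Illustrative, self-contained sketch of the visitor pattern used by
# PrintAllModules above: any object with enter()/leave() methods can be passed
# to Sequence.visit() to walk the modules of a sequence. The module and
# sequence names below are hypothetical; HLTBool is only a convenient trivial filter.
import FWCore.ParameterSet.Config as cms

class _LabelCollector(object):
    def __init__(self):
        self.labels = []
    def enter(self, visitee):
        try:
            self.labels.append(visitee.label())
        except Exception:
            pass
    def leave(self, visitee):
        pass

_proc = cms.Process('SKETCH')
_proc.filterA = cms.EDFilter('HLTBool', result=cms.bool(True))
_proc.filterB = cms.EDFilter('HLTBool', result=cms.bool(True))
_proc.demoSeq = cms.Sequence(_proc.filterA + _proc.filterB)
_collector = _LabelCollector()
_proc.demoSeq.visit(_collector)
# _collector.labels now holds ['filterA', 'filterB']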
1641  def prepare_RECO(self, sequence = "reconstruction"):
1642  ''' Enrich the schedule with reconstruction '''
1643  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1644  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1645  return
1646 
1647  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1648  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1649  if not self._options.fast:
1650  print "ERROR: this step is only implemented for FastSim"
1651  sys.exit()
1652  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1653  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1654  return
1655 
1656  def prepare_PAT(self, sequence = "miniAOD"):
1657  ''' Enrich the schedule with PAT '''
1658  self.prepare_PATFILTER(self)
1659  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1660  if not self._options.runUnscheduled:
1661  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1662  if self._options.isData:
1663  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1664  else:
1665  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1666  if self._options.fast:
1667  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1668 
1669  if self._options.hltProcess:
1670  if len(self._options.customise_commands) > 1:
1671  self._options.customise_commands = self._options.customise_commands + " \n"
1672  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\""
1673 # self.renameHLTprocessInSequence(sequence)
1674 
1675  return
1676 
1677  def prepare_EI(self, sequence = None):
1678  ''' Enrich the schedule with event interpretation '''
1679  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1680  if sequence in EventInterpretation:
1681  self.EIDefaultCFF = EventInterpretation[sequence]
1682  sequence = 'EIsequence'
1683  else:
1684  raise Exception('Cannot set %s event interpretation'%( sequence) )
1685  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1686  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1687  return
1688 
1689  def prepare_SKIM(self, sequence = "all"):
1690  ''' Enrich the schedule with skimming fragments'''
1691  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1692  sequence = sequence.split('.')[-1]
1693 
1694  skimlist=sequence.split('+')
1695  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1696  from Configuration.Skimming.autoSkim import autoSkim
1697  self.expandMapping(skimlist,autoSkim)
1698 
1699  #print "dictionnary for skims:",skimConfig.__dict__
1700  for skim in skimConfig.__dict__:
1701  skimstream = getattr(skimConfig,skim)
1702  if isinstance(skimstream,cms.Path):
1703  #blacklist the skim path so that it does not appear in the cfg
1704  self.blacklist_paths.append(skimstream)
1705  if (not isinstance(skimstream,cms.FilteredStream)):
1706  continue
1707  shortname = skim.replace('SKIMStream','')
1708  if (sequence=="all"):
1709  self.addExtraStream(skim,skimstream)
1710  elif (shortname in skimlist):
1711  self.addExtraStream(skim,skimstream)
1712  #add a DQM eventcontent for this guy
1713  if self._options.datatier=='DQM':
1714  self.process.load(self.EVTCONTDefaultCFF)
1715  skimstreamDQM = cms.FilteredStream(
1716  responsible = skimstream.responsible,
1717  name = skimstream.name+'DQM',
1718  paths = skimstream.paths,
1719  selectEvents = skimstream.selectEvents,
1720  content = self._options.datatier+'EventContent',
1721  dataTier = cms.untracked.string(self._options.datatier)
1722  )
1723  self.addExtraStream(skim+'DQM',skimstreamDQM)
1724  for i in range(skimlist.count(shortname)):
1725  skimlist.remove(shortname)
1726 
1727 
1728 
1729  if (len(skimlist)!=0 and sequence!="all"):
1730  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1731  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1732 
1733  def prepare_USER(self, sequence = None):
1734  ''' Enrich the schedule with a user defined sequence '''
1735  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1736  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1737  return
1738 
1739  def prepare_POSTRECO(self, sequence = None):
1740  """ Enrich the schedule with the postreco step """
1741  self.loadAndRemember(self.POSTRECODefaultCFF)
1742  self.scheduleSequence('postreco_generator','postreco_step')
1743  return
1744 
1745 
1746  def prepare_VALIDATION(self, sequence = 'validation'):
1747  print sequence,"in preparing validation"
1748  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1749  from Validation.Configuration.autoValidation import autoValidation
1750  #in case VALIDATION:something:somethingelse -> something,somethingelse
1751  sequence=sequence.split('.')[-1]
1752  if sequence.find(',')!=-1:
1753  prevalSeqName=sequence.split(',')[0].split('+')
1754  valSeqName=sequence.split(',')[1].split('+')
1755  self.expandMapping(prevalSeqName,autoValidation,index=0)
1756  self.expandMapping(valSeqName,autoValidation,index=1)
1757  else:
1758  if '@' in sequence:
1759  prevalSeqName=sequence.split('+')
1760  valSeqName=sequence.split('+')
1761  self.expandMapping(prevalSeqName,autoValidation,index=0)
1762  self.expandMapping(valSeqName,autoValidation,index=1)
1763  else:
1764  postfix=''
1765  if sequence:
1766  postfix='_'+sequence
1767  prevalSeqName=['prevalidation'+postfix]
1768  valSeqName=['validation'+postfix]
1769  if not hasattr(self.process,valSeqName[0]):
1770  prevalSeqName=['']
1771  valSeqName=[sequence]
1772 
1773  def NFI(index):
1774  ##name from index, required to keep backward compatibility
1775  if index==0:
1776  return ''
1777  else:
1778  return '%s'%index
1779 
1780 
1781  #rename the HLT process in validation steps
1782  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1783  for s in valSeqName+prevalSeqName:
1784  if s:
1785  self.renameHLTprocessInSequence(s)
1786  for (i,s) in enumerate(prevalSeqName):
1787  if s:
1788  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1789  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1790 
1791  for (i,s) in enumerate(valSeqName):
1792  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1793  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1794 
1795  #needed in case the miniAODValidation sequence is run starting from AODSIM
1796  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1797  return
1798 
1799  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1800  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1801  self._options.restoreRNDSeeds=True
1802 
1803  if not 'DIGI' in self.stepMap and not self._options.fast:
1804  self.executeAndRemember("process.mix.playback = True")
1805  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1806  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1807  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1808 
1809  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1810  #will get in the schedule, smoothly
1811  for (i,s) in enumerate(valSeqName):
1812  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1813 
1814  return
1815 
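# Illustrative note: NFI() above only exists to keep backward compatible path
# names when several (pre)validation sequences are scheduled, i.e.
# validation_step, validation_step1, validation_step2, ...
def _NFI(index):
    return '' if index == 0 else '%s' % index

assert ['validation_step%s' % _NFI(i) for i in range(3)] == \
       ['validation_step', 'validation_step1', 'validation_step2']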
1816 
1817  class MassSearchReplaceProcessNameVisitor(object):
1818  """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value
1819  It will climb down within PSets, VPSets and VInputTags to find its target"""
1820  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1821  self._paramReplace = paramReplace
1822  self._paramSearch = paramSearch
1823  self._verbose = verbose
1824  self._whitelist = whitelist
1825 
1826  def doIt(self,pset,base):
1827  if isinstance(pset, cms._Parameterizable):
1828  for name in pset.parameters_().keys():
1829  # skip whitelisted parameters
1830  if name in self._whitelist:
1831  continue
1832  # if I use pset.parameters_().items() I get copies of the parameter values
1833  # so I can't modify the nested pset
1834  value = getattr(pset,name)
1835  type = value.pythonTypeName()
1836  if type in ('cms.PSet', 'cms.untracked.PSet'):
1837  self.doIt(value,base+"."+name)
1838  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1839  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1840  elif type in ('cms.string', 'cms.untracked.string'):
1841  if value.value() == self._paramSearch:
1842  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1843  setattr(pset, name,self._paramReplace)
1844  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1845  for (i,n) in enumerate(value):
1846  if not isinstance(n, cms.InputTag):
1847  n=cms.InputTag(n)
1848  if n.processName == self._paramSearch:
1849  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1850  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1851  setattr(n,"processName",self._paramReplace)
1852  value[i]=n
1853  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1854  for (i,n) in enumerate(value):
1855  if n==self._paramSearch:
1856  getattr(pset,name)[i]=self._paramReplace
1857  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1858  if value.processName == self._paramSearch:
1859  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1860  setattr(getattr(pset, name),"processName",self._paramReplace)
1861 
1862  def enter(self,visitee):
1863  label = ''
1864  try:
1865  label = visitee.label()
1866  except AttributeError:
1867  label = '<Module not in a Process>'
1868  except:
1869  label = 'other exception'
1870  self.doIt(visitee, label)
1871 
1872  def leave(self,visitee):
1873  pass
1874 
1875  #visit a sequence to replace all input tags
1876  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1877  print "Replacing all InputTag %s => %s"%(oldT,newT)
1878  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1879  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1880  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1881  if not loadMe in self.additionalCommands:
1882  self.additionalCommands.append(loadMe)
1883  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1884 
1885  #change the process name used to address HLT results in any sequence
1886  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1887  if self._options.hltProcess:
1888  proc=self._options.hltProcess
1889  else:
1890  proc=self.process.name_()
1891  if proc==HLTprocess: return
1892  # look up all modules in the given sequence
1893  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1894  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1895  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1896  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1897  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1898 
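# Illustrative, self-contained sketch of the visitor defined above, equivalent
# to what renameHLTprocessInSequence() inlines into the dumped configuration:
# retarget every InputTag/string that points at the 'HLT' process. The module
# and sequence names are hypothetical; EventContentAnalyzer is only used as a
# harmless placeholder module type.
import FWCore.ParameterSet.Config as cms
from Configuration.Applications.ConfigBuilder import ConfigBuilder

_p = cms.Process('SKETCH2')
_p.consumer = cms.EDAnalyzer('EventContentAnalyzer')
_p.consumer.trigResults = cms.InputTag('TriggerResults', '', 'HLT')
_p.demoSeq = cms.Sequence(_p.consumer)
_p.demoSeq.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(
    'HLT', 'reHLT', whitelist=('subSystemFolder',)))
# _p.consumer.trigResults.processName is now 'reHLT'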
1899 
1900  def expandMapping(self,seqList,mapping,index=None):
1901  maxLevel=20
1902  level=0
1903  while '@' in repr(seqList) and level<maxLevel:
1904  level+=1
1905  for specifiedCommand in seqList:
1906  if specifiedCommand.startswith('@'):
1907  location=specifiedCommand[1:]
1908  if not location in mapping:
1909  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1910  mappedTo=mapping[location]
1911  if index!=None:
1912  mappedTo=mappedTo[index]
1913  seqList.remove(specifiedCommand)
1914  seqList.extend(mappedTo.split('+'))
1915  break
1916  if level==maxLevel:
1917  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1918 
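# Illustrative sketch (toy mapping and path names): what expandMapping() above
# does to a sequence list containing '@' aliases. The real mappings are the
# autoDQM, autoValidation, autoSkim and autoPCL dictionaries; the optional
# index argument (not shown here) picks one entry of a tuple-valued alias.
toyMapping = {'tracker': 'pathA+pathB', 'muon': 'pathC'}
seqList = ['@tracker', '@muon', 'pathD']
while any(item.startswith('@') for item in seqList):
    alias = next(item for item in seqList if item.startswith('@'))
    seqList.remove(alias)
    seqList.extend(toyMapping[alias[1:]].split('+'))
assert seqList == ['pathD', 'pathA', 'pathB', 'pathC']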
1919  def prepare_DQM(self, sequence = 'DQMOffline'):
1920  # this one needs replacement
1921 
1922  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1923  sequenceList=sequence.split('.')[-1].split('+')
1924  postSequenceList=sequence.split('.')[-1].split('+')
1925  from DQMOffline.Configuration.autoDQM import autoDQM
1926  self.expandMapping(sequenceList,autoDQM,index=0)
1927  self.expandMapping(postSequenceList,autoDQM,index=1)
1928 
1929  if len(set(sequenceList))!=len(sequenceList):
1930  sequenceList=list(set(sequenceList))
1931  print "Duplicate entries for DQM, using",sequenceList
1932 
1933  pathName='dqmoffline_step'
1934  for (i,sequence) in enumerate(sequenceList):
1935  if (i!=0):
1936  pathName='dqmoffline_%d_step'%(i)
1937 
1938  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1939  self.renameHLTprocessInSequence(sequence)
1940 
1941  setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1942  self.schedule.append(getattr(self.process,pathName))
1943 
1944  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1945  #will get in the schedule, smoothly
1946  getattr(self.process,pathName).insert(0,self.process.genstepfilter)
1947 
1948  pathName='dqmofflineOnPAT_step'
1949  for (i,sequence) in enumerate(postSequenceList):
1950  if (i!=0):
1951  pathName='dqmofflineOnPAT_%d_step'%(i)
1952 
1953  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1954  self.schedule.append(getattr(self.process,pathName))
1955 
1956  def prepare_HARVESTING(self, sequence = None):
1957  """ Enrich the process with harvesting step """
1958  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1959  self.loadAndRemember(self.DQMSaverCFF)
1960 
1961  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1962  sequence = sequence.split('.')[-1]
1963 
1964  # decide which HARVESTING paths to use
1965  harvestingList = sequence.split("+")
1966  from DQMOffline.Configuration.autoDQM import autoDQM
1967  from Validation.Configuration.autoValidation import autoValidation
1968  import copy
1969  combined_mapping = copy.deepcopy( autoDQM )
1970  combined_mapping.update( autoValidation )
1971  self.expandMapping(harvestingList,combined_mapping,index=-1)
1972 
1973  if len(set(harvestingList))!=len(harvestingList):
1974  harvestingList=list(set(harvestingList))
1975  print "Duplicate entries for HARVESTING, using",harvestingList
1976 
1977  for name in harvestingList:
1978  if not name in harvestingConfig.__dict__:
1979  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1980  continue
1981  harvestingstream = getattr(harvestingConfig,name)
1982  if isinstance(harvestingstream,cms.Path):
1983  self.schedule.append(harvestingstream)
1984  self.blacklist_paths.append(harvestingstream)
1985  if isinstance(harvestingstream,cms.Sequence):
1986  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1987  self.schedule.append(getattr(self.process,name+"_step"))
1988 
1989  self.scheduleSequence('DQMSaver','dqmsave_step')
1990  return
1991 
1992  def prepare_ALCAHARVEST(self, sequence = None):
1993  """ Enrich the process with AlCaHarvesting step """
1994  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
1995  sequence=sequence.split(".")[-1]
1996 
1997  # decide which AlcaHARVESTING paths to use
1998  harvestingList = sequence.split("+")
1999 
2000 
2001 
2002  from Configuration.AlCa.autoPCL import autoPCL
2003  self.expandMapping(harvestingList,autoPCL)
2004 
2005  for name in harvestingConfig.__dict__:
2006  harvestingstream = getattr(harvestingConfig,name)
2007  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2008  self.schedule.append(harvestingstream)
2009  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2010  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2011  harvestingList.remove(name)
2012  # append the common part at the end of the sequence
2013  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2014  self.schedule.append(lastStep)
2015 
2016  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2017  print "The following harvesting could not be found : ", harvestingList
2018  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2019 
2020 
2021 
2022  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2023  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2024  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2025  return
2026 
2027  def finalizeFastSimHLT(self):
2028  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2029  self.schedule.append(self.process.reconstruction)
2030 
2031 
2032  def build_production_info(self, evt_type, evtnumber):
2033  """ Add useful info for the production. """
2034  self.process.configurationMetadata=cms.untracked.PSet\
2035  (version=cms.untracked.string("$Revision: 1.19 $"),
2036  name=cms.untracked.string("Applications"),
2037  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2038  )
2039 
2040  self.addedObjects.append(("Production Info","configurationMetadata"))
2041 
2042 
2043  def prepare(self, doChecking = False):
2044  """ Prepare the configuration string and add missing pieces."""
2045 
2046  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2047  self.addMaxEvents()
2048  if self.with_input:
2049  self.addSource()
2050  self.addStandardSequences()
2051  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2052  self.completeInputCommand()
2053  self.addConditions()
2054 
2055 
2056  outputModuleCfgCode=""
2057  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2058  outputModuleCfgCode=self.addOutput()
2059 
2060  self.addCommon()
2061 
2062  self.pythonCfgCode = "# Auto generated configuration file\n"
2063  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2064  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2065  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2066  if hasattr(self._options,"era") and self._options.era :
2067  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2068  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2069  # Multiple eras can be specified in a comma separated list
2070  for requestedEra in self._options.era.split(",") :
2071  self.pythonCfgCode += ",eras."+requestedEra
2072  self.pythonCfgCode += ")\n\n" # end of the line
2073  else :
2074  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2075 
2076  self.pythonCfgCode += "# import of standard configurations\n"
2077  for module in self.imports:
2078  self.pythonCfgCode += ("process.load('"+module+"')\n")
2079 
2080  # production info
2081  if not hasattr(self.process,"configurationMetadata"):
2082  self.build_production_info(self._options.evt_type, self._options.number)
2083  else:
2084  #the PSet was added via a load
2085  self.addedObjects.append(("Production Info","configurationMetadata"))
2086 
2087  self.pythonCfgCode +="\n"
2088  for comment,object in self.addedObjects:
2089  if comment!="":
2090  self.pythonCfgCode += "\n# "+comment+"\n"
2091  self.pythonCfgCode += dumpPython(self.process,object)
2092 
2093  # dump the output definition
2094  self.pythonCfgCode += "\n# Output definition\n"
2095  self.pythonCfgCode += outputModuleCfgCode
2096 
2097  # dump all additional outputs (e.g. alca or skim streams)
2098  self.pythonCfgCode += "\n# Additional output definition\n"
2099  #I do not understand why the keys are not normally ordered.
2100  nl=self.additionalOutputs.keys()
2101  nl.sort()
2102  for name in nl:
2103  output = self.additionalOutputs[name]
2104  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2105  tmpOut = cms.EndPath(output)
2106  setattr(self.process,name+'OutPath',tmpOut)
2107  self.schedule.append(tmpOut)
2108 
2109  # dump all additional commands
2110  self.pythonCfgCode += "\n# Other statements\n"
2111  for command in self.additionalCommands:
2112  self.pythonCfgCode += command + "\n"
2113 
2114  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2115  for object in self._options.inlineObjets.split(','):
2116  if not object:
2117  continue
2118  if not hasattr(self.process,object):
2119  print 'cannot inline -'+object+'- : not known'
2120  else:
2121  self.pythonCfgCode +='\n'
2122  self.pythonCfgCode +=dumpPython(self.process,object)
2123 
2124  # dump all paths
2125  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2126  for path in self.process.paths:
2127  if getattr(self.process,path) not in self.blacklist_paths:
2128  self.pythonCfgCode += dumpPython(self.process,path)
2129 
2130  for endpath in self.process.endpaths:
2131  if getattr(self.process,endpath) not in self.blacklist_paths:
2132  self.pythonCfgCode += dumpPython(self.process,endpath)
2133 
2134  # dump the schedule
2135  self.pythonCfgCode += "\n# Schedule definition\n"
2136  result = "process.schedule = cms.Schedule("
2137 
2138  # handling of the schedule
2139  self.process.schedule = cms.Schedule()
2140  for item in self.schedule:
2141  if not isinstance(item, cms.Schedule):
2142  self.process.schedule.append(item)
2143  else:
2144  self.process.schedule.extend(item)
2145 
2146  if hasattr(self.process,"HLTSchedule"):
2147  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2148  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2149  pathNames = ['process.'+p.label_() for p in beforeHLT]
2150  result += ','.join(pathNames)+')\n'
2151  result += 'process.schedule.extend(process.HLTSchedule)\n'
2152  pathNames = ['process.'+p.label_() for p in afterHLT]
2153  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2154  else:
2155  pathNames = ['process.'+p.label_() for p in self.schedule]
2156  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2157 
2158  self.pythonCfgCode += result
2159 
2160  if self._options.nThreads != "1":
2161  self.pythonCfgCode +="\n"
2162  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2163  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2164  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2165  #repacked version
2166  if self._options.isRepacked:
2167  self.pythonCfgCode +="\n"
2168  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2169  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2170  MassReplaceInputTag(self.process)
2171 
2172  # special treatment in case of production filter sequence 2/2
2173  if self.productionFilterSequence:
2174  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2175  self.pythonCfgCode +='for path in process.paths:\n'
2176  if len(self.conditionalPaths):
2177  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2178  if len(self.excludedPaths):
2179  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2180  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2181  pfs = getattr(self.process,self.productionFilterSequence)
2182  for path in self.process.paths:
2183  if not path in self.conditionalPaths: continue
2184  if path in self.excludedPaths: continue
2185  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2186 
2187 
2188  # dump customise fragment
2189  self.pythonCfgCode += self.addCustomise()
2190 
2191  if self._options.runUnscheduled:
2192  # prune and delete paths
2193  #this is not supporting the blacklist at this point since I do not understand it
2194  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2195  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2196  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2197 
2198  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2199  self.process=convertToUnscheduled(self.process)
2200 
2201  #now add the unscheduled stuff
2202  for module in self.importsUnsch:
2203  self.process.load(module)
2204  self.pythonCfgCode += ("process.load('"+module+"')\n")
2205 
2206  #and clean the unscheduled stuff
2207  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2208  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2209 
2210  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2211  self.process=cleanUnscheduled(self.process)
2212 
2213  self.pythonCfgCode += self.addCustomise(1)
2214 
2215  self.pythonCfgCode += self.addCustomiseCmdLine()
2216 
2217  # Temporary hack to put the early delete customization after
2218  # everything else
2219  #
2220  # FIXME: remove when no longer needed
2221  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2222  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2223  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2224  self.pythonCfgCode += "# End adding early deletion\n"
2225  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2226  self.process = customiseEarlyDelete(self.process)
2227 
2228 
2229  # make the .io file
2230 
2231  if self._options.io:
2232  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2233  if not self._options.io.endswith('.io'): self._options.io+='.io'
2234  io=open(self._options.io,'w')
2235  ioJson={}
2236  if hasattr(self.process.source,"fileNames"):
2237  if len(self.process.source.fileNames.value()):
2238  ioJson['primary']=self.process.source.fileNames.value()
2239  if hasattr(self.process.source,"secondaryFileNames"):
2240  if len(self.process.source.secondaryFileNames.value()):
2241  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2242  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2243  ioJson['pileup']=self._options.pileup_input[4:]
2244  for (o,om) in self.process.outputModules_().items():
2245  ioJson[o]=om.fileName.value()
2246  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2247  if self.productionFilterSequence:
2248  ioJson['filter']=self.productionFilterSequence
2249  import json
2250  io.write(json.dumps(ioJson))
2251  return
2252 
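# Illustrative usage sketch: cmsDriver.py is the normal driver of this class.
# 'myOptions' below is a placeholder for a fully populated options object
# (the defaultOptions at the top of this file list the expected attributes).
#
#   from Configuration.Applications.ConfigBuilder import ConfigBuilder
#   builder = ConfigBuilder(myOptions, with_output=True, with_input=True)
#   builder.prepare()
#   with open('myConfig.py', 'w') as cfg:
#       cfg.write(builder.pythonCfgCode)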