CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 import FWCore.ParameterSet.DictTypes as DictTypes
12 class Options:
13  pass
14 
15 # the canonical defaults
16 defaultOptions = Options()
17 defaultOptions.datamix = 'DataOnSim'
18 defaultOptions.isMC=False
19 defaultOptions.isData=True
20 defaultOptions.step=''
21 defaultOptions.pileup='NoPileUp'
22 defaultOptions.pileup_input = None
23 defaultOptions.geometry = 'SimDB'
24 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
25 defaultOptions.magField = '38T'
26 defaultOptions.conditions = None
27 defaultOptions.useCondDBv1 = False
28 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
29 defaultOptions.harvesting= 'AtRunEnd'
30 defaultOptions.gflash = False
31 defaultOptions.number = -1
32 defaultOptions.number_out = None
33 defaultOptions.arguments = ""
34 defaultOptions.name = "NO NAME GIVEN"
35 defaultOptions.evt_type = ""
36 defaultOptions.filein = ""
37 defaultOptions.dasquery=""
38 defaultOptions.secondfilein = ""
39 defaultOptions.customisation_file = []
40 defaultOptions.customisation_file_unsch = []
41 defaultOptions.customise_commands = ""
42 defaultOptions.inline_custom=False
43 defaultOptions.particleTable = 'pythiapdt'
44 defaultOptions.particleTableList = ['pythiapdt','pdt']
45 defaultOptions.dirin = ''
46 defaultOptions.dirout = ''
47 defaultOptions.filetype = 'EDM'
48 defaultOptions.fileout = 'output.root'
49 defaultOptions.filtername = ''
50 defaultOptions.lazy_download = False
51 defaultOptions.custom_conditions = ''
52 defaultOptions.hltProcess = ''
53 defaultOptions.eventcontent = None
54 defaultOptions.datatier = None
55 defaultOptions.inlineEventContent = True
56 defaultOptions.inlineObjets =''
57 defaultOptions.hideGen=False
58 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
59 defaultOptions.beamspot=None
60 defaultOptions.outputDefinition =''
61 defaultOptions.inputCommands = None
62 defaultOptions.outputCommands = None
63 defaultOptions.inputEventContent = ''
64 defaultOptions.dropDescendant = False
65 defaultOptions.relval = None
66 defaultOptions.slhc = None
67 defaultOptions.profile = None
68 defaultOptions.isRepacked = False
69 defaultOptions.restoreRNDSeeds = False
70 defaultOptions.donotDropOnInput = ''
71 defaultOptions.python_filename =''
72 defaultOptions.io=None
73 defaultOptions.lumiToProcess=None
74 defaultOptions.fast=False
75 defaultOptions.runsAndWeightsForMC = None
76 defaultOptions.runsScenarioForMC = None
77 defaultOptions.runUnscheduled = False
78 defaultOptions.timeoutOutput = False
79 defaultOptions.nThreads = '1'
80 
81 # some helper routines
82 def dumpPython(process,name):
83  theObject = getattr(process,name)
84  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
85  return "process."+name+" = " + theObject.dumpPython("process")
86  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
87  return "process."+name+" = " + theObject.dumpPython()+"\n"
88  else:
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90 def filesFromList(fileName,s=None):
91  import os
92  import FWCore.ParameterSet.Config as cms
93  prim=[]
94  sec=[]
95  for line in open(fileName,'r'):
96  if line.count(".root")>=2:
97  #two files solution...
98  entries=line.replace("\n","").split()
99  if not entries[0] in prim:
100  prim.append(entries[0])
101  if not entries[1] in sec:
102  sec.append(entries[1])
103  elif (line.find(".root")!=-1):
104  entry=line.replace("\n","")
105  if not entry in prim:
106  prim.append(entry)
107  if s:
108  if not hasattr(s,"fileNames"):
109  s.fileNames=cms.untracked.vstring(prim)
110  else:
111  s.fileNames.extend(prim)
112  if len(sec)!=0:
113  if not hasattr(s,"secondaryFileNames"):
114  s.secondaryFileNames=cms.untracked.vstring(sec)
115  else:
116  s.secondaryFileNames.extend(sec)
117  print "found files: ",prim
118  if len(prim)==0:
119  raise Exception("There are not files in input from the file list")
120  if len(sec)!=0:
121  print "found parent files:",sec
122  return (prim,sec)
123 
124 def filesFromDASQuery(query,s=None):
125  import os
126  import FWCore.ParameterSet.Config as cms
127  prim=[]
128  sec=[]
129  print "the query is",query
130  for line in os.popen('das_client.py --query "%s"'%(query)):
131  if line.count(".root")>=2:
132  #two files solution...
133  entries=line.replace("\n","").split()
134  if not entries[0] in prim:
135  prim.append(entries[0])
136  if not entries[1] in sec:
137  sec.append(entries[1])
138  elif (line.find(".root")!=-1):
139  entry=line.replace("\n","")
140  if not entry in prim:
141  prim.append(entry)
142  if s:
143  if not hasattr(s,"fileNames"):
144  s.fileNames=cms.untracked.vstring(prim)
145  else:
146  s.fileNames.extend(prim)
147  if len(sec)!=0:
148  if not hasattr(s,"secondaryFileNames"):
149  s.secondaryFileNames=cms.untracked.vstring(sec)
150  else:
151  s.secondaryFileNames.extend(sec)
152  print "found files: ",prim
153  if len(sec)!=0:
154  print "found parent files:",sec
155  return (prim,sec)
156 
157 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
158  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
159  for s in aProcess.paths_().keys():
160  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
161 
162 def anyOf(listOfKeys,dict,opt=None):
163  for k in listOfKeys:
164  if k in dict:
165  toReturn=dict[k]
166  dict.pop(k)
167  return toReturn
168  if opt!=None:
169  return opt
170  else:
171  raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
172 
174  """The main building routines """
175 
176  def __init__(self, options, process = None, with_output = False, with_input = False ):
177  """options taken from old cmsDriver and optparse """
178 
179  options.outfile_name = options.dirout+options.fileout
180 
181  self._options = options
182 
183  if self._options.isData and options.isMC:
184  raise Exception("ERROR: You may specify only --data or --mc, not both")
185  #if not self._options.conditions:
186  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
187 
188  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
189  if 'ENDJOB' in self._options.step:
190  if (hasattr(self._options,"outputDefinition") and \
191  self._options.outputDefinition != '' and \
192  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
193  (hasattr(self._options,"datatier") and \
194  self._options.datatier and \
195  'DQMIO' in self._options.datatier):
196  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
197  self._options.step=self._options.step.replace(',ENDJOB','')
198 
199 
200 
201  # what steps are provided by this class?
202  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
203  self.stepMap={}
204  self.stepKeys=[]
205  for step in self._options.step.split(","):
206  if step=='': continue
207  stepParts = step.split(":")
208  stepName = stepParts[0]
209  if stepName not in stepList and not stepName.startswith('re'):
210  raise ValueError("Step "+stepName+" unknown")
211  if len(stepParts)==1:
212  self.stepMap[stepName]=""
213  elif len(stepParts)==2:
214  self.stepMap[stepName]=stepParts[1].split('+')
215  elif len(stepParts)==3:
216  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
217  else:
218  raise ValueError("Step definition "+step+" invalid")
219  self.stepKeys.append(stepName)
220 
221  #print "map of steps is:",self.stepMap
222 
223  self.with_output = with_output
224  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
225  self.with_output = False
226  self.with_input = with_input
227  if process == None:
228  self.process = cms.Process(self._options.name)
229  else:
230  self.process = process
231  self.imports = []
232  self.importsUnsch = []
233  self.define_Configs()
234  self.schedule = list()
235 
236  # we are doing three things here:
237  # creating a process to catch errors
238  # building the code to re-create the process
239 
240  self.additionalCommands = []
241  # TODO: maybe a list of to be dumped objects would help as well
242  self.blacklist_paths = []
243  self.addedObjects = []
244  self.additionalOutputs = {}
245 
246  self.productionFilterSequence = None
247  self.nextScheduleIsConditional=False
248  self.conditionalPaths=[]
249  self.excludedPaths=[]
250 
251  def profileOptions(self):
252  """
253  addIgProfService
254  Function to add the igprof profile service so that you can dump in the middle
255  of the run.
256  """
257  profileOpts = self._options.profile.split(':')
258  profilerStart = 1
259  profilerInterval = 100
260  profilerFormat = None
261  profilerJobFormat = None
262 
263  if len(profileOpts):
264  #type, given as first argument is unused here
265  profileOpts.pop(0)
266  if len(profileOpts):
267  startEvent = profileOpts.pop(0)
268  if not startEvent.isdigit():
269  raise Exception("%s is not a number" % startEvent)
270  profilerStart = int(startEvent)
271  if len(profileOpts):
272  eventInterval = profileOpts.pop(0)
273  if not eventInterval.isdigit():
274  raise Exception("%s is not a number" % eventInterval)
275  profilerInterval = int(eventInterval)
276  if len(profileOpts):
277  profilerFormat = profileOpts.pop(0)
278 
279 
280  if not profilerFormat:
281  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
282  self._options.step,
283  self._options.pileup,
284  self._options.conditions,
285  self._options.datatier,
286  self._options.profileTypeLabel)
287  if not profilerJobFormat and profilerFormat.endswith(".gz"):
288  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
289  elif not profilerJobFormat:
290  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
291 
292  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
293 
294  def load(self,includeFile):
295  includeFile = includeFile.replace('/','.')
296  self.process.load(includeFile)
297  return sys.modules[includeFile]
298 
299  def loadAndRemember(self, includeFile,unsch=0):
300  """helper routine to load am memorize imports"""
301  # we could make the imports a on-the-fly data method of the process instance itself
302  # not sure if the latter is a good idea
303  includeFile = includeFile.replace('/','.')
304  if unsch==0:
305  self.imports.append(includeFile)
306  self.process.load(includeFile)
307  return sys.modules[includeFile]
308  else:
309  self.importsUnsch.append(includeFile)
310  return 0#sys.modules[includeFile]
311 
312  def executeAndRemember(self, command):
313  """helper routine to remember replace statements"""
314  self.additionalCommands.append(command)
315  if not command.strip().startswith("#"):
316  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
317  import re
318  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
319  #exec(command.replace("process.","self.process."))
320 
321  def addCommon(self):
322  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
323  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
324  else:
325  self.process.options = cms.untracked.PSet( )
326 
327  if self._options.runUnscheduled:
328  self.process.options.allowUnscheduled=cms.untracked.bool(True)
329 
330  self.addedObjects.append(("","options"))
331 
332  if self._options.lazy_download:
333  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
334  stats = cms.untracked.bool(True),
335  enable = cms.untracked.bool(True),
336  cacheHint = cms.untracked.string("lazy-download"),
337  readHint = cms.untracked.string("read-ahead-buffered")
338  )
339  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
340 
341  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
342  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
343 
344  if self._options.profile:
345  (start, interval, eventFormat, jobFormat)=self.profileOptions()
346  self.process.IgProfService = cms.Service("IgProfService",
347  reportFirstEvent = cms.untracked.int32(start),
348  reportEventInterval = cms.untracked.int32(interval),
349  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
350  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
351  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
352 
353  def addMaxEvents(self):
354  """Here we decide how many evts will be processed"""
355  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
356  if self._options.number_out:
357  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
358  self.addedObjects.append(("","maxEvents"))
359 
360  def addSource(self):
361  """Here the source is built. Priority: file, generator"""
362  self.addedObjects.append(("Input source","source"))
363 
364  def filesFromOption(self):
365  for entry in self._options.filein.split(','):
366  print "entry",entry
367  if entry.startswith("filelist:"):
368  filesFromList(entry[9:],self.process.source)
369  elif entry.startswith("dbs:") or entry.startswith("das:"):
370  filesFromDASQuery('file dataset = %s'%(entry[4:]),self.process.source)
371  else:
372  self.process.source.fileNames.append(self._options.dirin+entry)
373  if self._options.secondfilein:
374  if not hasattr(self.process.source,"secondaryFileNames"):
375  raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
376  for entry in self._options.secondfilein.split(','):
377  print "entry",entry
378  if entry.startswith("filelist:"):
379  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
380  elif entry.startswith("dbs:") or entry.startswith("das:"):
381  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:])))[0])
382  else:
383  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
384 
385  if self._options.filein or self._options.dasquery:
386  if self._options.filetype == "EDM":
387  self.process.source=cms.Source("PoolSource",
388  fileNames = cms.untracked.vstring(),
389  secondaryFileNames= cms.untracked.vstring())
390  filesFromOption(self)
391  elif self._options.filetype == "DAT":
392  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
393  filesFromOption(self)
394  elif self._options.filetype == "LHE":
395  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
396  if self._options.filein.startswith("lhe:"):
397  #list the article directory automatically
398  args=self._options.filein.split(':')
399  article=args[1]
400  print 'LHE input from article ',article
401  location='/store/lhe/'
402  import os
403  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
404  for line in textOfFiles:
405  for fileName in [x for x in line.split() if '.lhe' in x]:
406  self.process.source.fileNames.append(location+article+'/'+fileName)
407  if len(args)>2:
408  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
409  else:
410  filesFromOption(self)
411 
412 
413  elif self._options.filetype == "DQM":
414  self.process.source=cms.Source("DQMRootSource",
415  fileNames = cms.untracked.vstring())
416  filesFromOption(self)
417 
418  elif self._options.filetype == "DQMDAQ":
419  # FIXME: how to configure it if there are no input files specified?
420  self.process.source=cms.Source("DQMStreamerReader")
421 
422 
423  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
424  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
425 
426  if self._options.dasquery!='':
427  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
428  filesFromDASQuery(self._options.dasquery,self.process.source)
429 
430  if self._options.inputCommands:
431  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
432  for command in self._options.inputCommands.split(','):
433  # remove whitespace around the keep/drop statements
434  command = command.strip()
435  if command=='': continue
436  self.process.source.inputCommands.append(command)
437  if not self._options.dropDescendant:
438  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
439 
440  if self._options.lumiToProcess:
441  import FWCore.PythonUtilities.LumiList as LumiList
442  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
443 
444  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
445  if self.process.source is None:
446  self.process.source=cms.Source("EmptySource")
447 
448  # modify source in case of run-dependent MC
449  self.runsAndWeights=None
450  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
451  if not self._options.isMC :
452  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
453  if self._options.runsAndWeightsForMC:
454  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
455  else:
456  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
457  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
458  __import__(RunsAndWeights[self._options.runsScenarioForMC])
459  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
460  else:
461  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
462 
463  if self.runsAndWeights:
464  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
465  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
466  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
467  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
468 
469  return
470 
471  def addOutput(self):
472  """ Add output module to the process """
473  result=""
474  if self._options.outputDefinition:
475  if self._options.datatier:
476  print "--datatier & --eventcontent options ignored"
477 
478  #new output convention with a list of dict
479  outList = eval(self._options.outputDefinition)
480  for (id,outDefDict) in enumerate(outList):
481  outDefDictStr=outDefDict.__str__()
482  if not isinstance(outDefDict,dict):
483  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
484  #requires option: tier
485  theTier=anyOf(['t','tier','dataTier'],outDefDict)
486  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
487  ## event content
488  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
489  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
490  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
491  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
492  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
493  # module label has a particular role
494  if not theModuleLabel:
495  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
496  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
497  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
498  ]
499  for name in tryNames:
500  if not hasattr(self.process,name):
501  theModuleLabel=name
502  break
503  if not theModuleLabel:
504  raise Exception("cannot find a module label for specification: "+outDefDictStr)
505  if id==0:
506  defaultFileName=self._options.outfile_name
507  else:
508  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
509 
510  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
511  if not theFileName.endswith('.root'):
512  theFileName+='.root'
513 
514  if len(outDefDict.keys()):
515  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
516  if theStreamType=='DQMIO': theStreamType='DQM'
517  if theStreamType=='ALL':
518  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
519  else:
520  theEventContent = getattr(self.process, theStreamType+"EventContent")
521 
522  if theStreamType=='ALCARECO' and not theFilterName:
523  theFilterName='StreamALCACombined'
524 
525  CppType='PoolOutputModule'
526  if self._options.timeoutOutput:
527  CppType='TimeoutPoolOutputModule'
528  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
529  output = cms.OutputModule(CppType,
530  theEventContent.clone(),
531  fileName = cms.untracked.string(theFileName),
532  dataset = cms.untracked.PSet(
533  dataTier = cms.untracked.string(theTier),
534  filterName = cms.untracked.string(theFilterName))
535  )
536  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
537  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
538  if not theSelectEvent and hasattr(self.process,'filtering_step'):
539  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
540  if theSelectEvent:
541  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
542 
543  if hasattr(self.process,theModuleLabel):
544  raise Exception("the current process already has a module "+theModuleLabel+" defined")
545  #print "creating output module ",theModuleLabel
546  setattr(self.process,theModuleLabel,output)
547  outputModule=getattr(self.process,theModuleLabel)
548  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
549  path=getattr(self.process,theModuleLabel+'_step')
550  self.schedule.append(path)
551 
552  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
553  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
554  return label
555  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
556  if theExtraOutputCommands:
557  if not isinstance(theExtraOutputCommands,list):
558  raise Exception("extra ouput command in --option must be a list of strings")
559  if hasattr(self.process,theStreamType+"EventContent"):
560  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
561  else:
562  outputModule.outputCommands.extend(theExtraOutputCommands)
563 
564  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
565 
566  ##ends the --output options model
567  return result
568 
569  streamTypes=self._options.eventcontent.split(',')
570  tiers=self._options.datatier.split(',')
571  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
572  raise Exception("number of event content arguments does not match number of datatier arguments")
573 
574  # if the only step is alca we don't need to put in an output
575  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
576  return "\n"
577 
578  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
579  if streamType=='': continue
580  if streamType=='DQMIO': streamType='DQM'
581  theEventContent = getattr(self.process, streamType+"EventContent")
582  if i==0:
583  theFileName=self._options.outfile_name
584  theFilterName=self._options.filtername
585  else:
586  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
587  theFilterName=self._options.filtername
588  CppType='PoolOutputModule'
589  if self._options.timeoutOutput:
590  CppType='TimeoutPoolOutputModule'
591  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
592  output = cms.OutputModule(CppType,
593  theEventContent,
594  fileName = cms.untracked.string(theFileName),
595  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
596  filterName = cms.untracked.string(theFilterName)
597  )
598  )
599  if hasattr(self.process,"generation_step") and streamType!='LHE':
600  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
601  if hasattr(self.process,"filtering_step"):
602  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
603 
604  if streamType=='ALCARECO':
605  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
606 
607  if "MINIAOD" in streamType:
608  output.dropMetaData = cms.untracked.string('ALL')
609  output.fastCloning= cms.untracked.bool(False)
610  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
611 
612  outputModuleName=streamType+'output'
613  setattr(self.process,outputModuleName,output)
614  outputModule=getattr(self.process,outputModuleName)
615  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
616  path=getattr(self.process,outputModuleName+'_step')
617  self.schedule.append(path)
618 
619  if self._options.outputCommands and streamType!='DQM':
620  for evct in self._options.outputCommands.split(','):
621  if not evct: continue
622  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
623 
624  if not self._options.inlineEventContent:
625  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
626  return label
627  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
628 
629  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
630 
631  return result
632 
634  """
635  Add selected standard sequences to the process
636  """
637  # load the pile up file
638  if self._options.pileup:
639  pileupSpec=self._options.pileup.split(',')[0]
640 
641  # FastSim: GEN-mixing or DIGI-RECO mixing?
642  GEN_mixing = False
643  if self._options.fast and pileupSpec.find("GEN_") == 0:
644  GEN_mixing = True
645  pileupSpec = pileupSpec[4:]
646 
647  # Does the requested pile-up scenario exist?
648  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
649  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
650  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
651  if self._options.fast:
652  message += "\n-"*20+"\n additional options for FastSim (gen-mixing):\n" + "-"*20 + "\n" + '\n'.join(["GEN_" + x for x in Mixing.keys()]) + "\n"
653  raise Exception(message)
654 
655  # Put mixing parameters in a dictionary
656  if '.' in pileupSpec:
657  mixingDict={'file':pileupSpec}
658  elif pileupSpec.startswith('file:'):
659  mixingDict={'file':pileupSpec[5:]}
660  else:
661  import copy
662  mixingDict=copy.copy(Mixing[pileupSpec])
663  if len(self._options.pileup.split(','))>1:
664  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
665 
666  # Load the pu cfg file corresponding to the requested pu scenario
667  if 'file:' in pileupSpec:
668  #the file is local
669  self.process.load(mixingDict['file'])
670  print "inlining mixing module configuration"
671  self._options.inlineObjets+=',mix'
672  else:
673  self.loadAndRemember(mixingDict['file'])
674 
675  # FastSim: transform cfg of MixingModule from FullSim to FastSim
676  if self._options.fast:
677  if GEN_mixing:
678  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareGenMixing")
679  else:
680  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareDigiRecoMixing")
681 
682  mixingDict.pop('file')
683  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
684  if self._options.pileup_input:
685  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
686  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
687  else:
688  mixingDict['F']=self._options.pileup_input.split(',')
689  specialization=defineMixing(mixingDict)
690  for command in specialization:
691  self.executeAndRemember(command)
692  if len(mixingDict)!=0:
693  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
694 
695 
696  # load the geometry file
697  try:
698  if len(self.stepMap):
699  self.loadAndRemember(self.GeometryCFF)
700  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
701  self.loadAndRemember(self.SimGeometryCFF)
702  if self.geometryDBLabel:
703  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
704  except ImportError:
705  print "Geometry option",self._options.geometry,"unknown."
706  raise
707 
708  if len(self.stepMap):
709  self.loadAndRemember(self.magFieldCFF)
710 
711  for stepName in self.stepKeys:
712  stepSpec = self.stepMap[stepName]
713  print "Step:", stepName,"Spec:",stepSpec
714  if stepName.startswith('re'):
715  ##add the corresponding input content
716  if stepName[2:] not in self._options.donotDropOnInput:
717  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
718  stepName=stepName[2:]
719  if stepSpec=="":
720  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
721  elif type(stepSpec)==list:
722  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
723  elif type(stepSpec)==tuple:
724  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
725  else:
726  raise ValueError("Invalid step definition")
727 
728  if self._options.restoreRNDSeeds!=False:
729  #it is either True, or a process name
730  if self._options.restoreRNDSeeds==True:
731  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
732  else:
733  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
734  if self._options.inputEventContent or self._options.inputCommands:
735  if self._options.inputCommands:
736  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
737  else:
738  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
739 
740 
742  if self._options.inputEventContent:
743  import copy
744  def dropSecondDropStar(iec):
745  #drop occurence of 'drop *' in the list
746  count=0
747  for item in iec:
748  if item=='drop *':
749  if count!=0:
750  iec.remove(item)
751  count+=1
752 
753 
754  ## allow comma separated input eventcontent
755  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
756  for evct in self._options.inputEventContent.split(','):
757  if evct=='': continue
758  theEventContent = getattr(self.process, evct+"EventContent")
759  if hasattr(theEventContent,'outputCommands'):
760  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
761  if hasattr(theEventContent,'inputCommands'):
762  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
763 
764  dropSecondDropStar(self.process.source.inputCommands)
765 
766  if not self._options.dropDescendant:
767  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
768 
769 
770  return
771 
772  def addConditions(self):
773  """Add conditions to the process"""
774  if not self._options.conditions: return
775 
776  if 'FrontierConditions_GlobalTag' in self._options.conditions:
777  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
778  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
779 
780  self.loadAndRemember(self.ConditionsDefaultCFF)
781 
782  if self._options.useCondDBv1:
783  from Configuration.AlCa.GlobalTag import GlobalTag
784  else:
785  from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag
786 
787  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
788 
789  if self._options.useCondDBv1:
790  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
791  else:
792  self.additionalCommands.append('from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag')
793 
794  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
795 
796  if self._options.slhc:
797  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
798 
799 
800  def addCustomise(self,unsch=0):
801  """Include the customise code """
802 
803  custOpt=[]
804  if unsch==0:
805  for c in self._options.customisation_file:
806  custOpt.extend(c.split(","))
807  else:
808  for c in self._options.customisation_file_unsch:
809  custOpt.extend(c.split(","))
810 
812  for opt in custOpt:
813  if opt=='': continue
814  if opt.count('.')>1:
815  raise Exception("more than . in the specification:"+opt)
816  fileName=opt.split('.')[0]
817  if opt.count('.')==0: rest='customise'
818  else:
819  rest=opt.split('.')[1]
820  if rest=='py': rest='customise' #catch the case of --customise file.py
821 
822  if fileName in custMap:
823  custMap[fileName].extend(rest.split('+'))
824  else:
825  custMap[fileName]=rest.split('+')
826 
827  if len(custMap)==0:
828  final_snippet='\n'
829  else:
830  final_snippet='\n# customisation of the process.\n'
831 
832  allFcn=[]
833  for opt in custMap:
834  allFcn.extend(custMap[opt])
835  for fcn in allFcn:
836  if allFcn.count(fcn)!=1:
837  raise Exception("cannot specify twice "+fcn+" as a customisation method")
838 
839  for f in custMap:
840  # let python search for that package and do syntax checking at the same time
841  packageName = f.replace(".py","").replace("/",".")
842  __import__(packageName)
843  package = sys.modules[packageName]
844 
845  # now ask the package for its definition and pick .py instead of .pyc
846  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
847 
848  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
849  if self._options.inline_custom:
850  for line in file(customiseFile,'r'):
851  if "import FWCore.ParameterSet.Config" in line:
852  continue
853  final_snippet += line
854  else:
855  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
856  for fcn in custMap[f]:
857  print "customising the process with",fcn,"from",f
858  if not hasattr(package,fcn):
859  #bound to fail at run time
860  raise Exception("config "+f+" has no function "+fcn)
861  #execute the command
862  self.process=getattr(package,fcn)(self.process)
863  #and print it in the configuration
864  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
865  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
866 
867  if len(custMap)!=0:
868  final_snippet += '\n# End of customisation functions\n'
869 
870  ### now for a useful command
871  if unsch==1 or not self._options.runUnscheduled:
872  if self._options.customise_commands:
873  import string
874  final_snippet +='\n# Customisation from command line'
875  for com in self._options.customise_commands.split('\\n'):
876  com=string.lstrip(com)
877  self.executeAndRemember(com)
878  final_snippet +='\n'+com
879 
880  return final_snippet
881 
882  #----------------------------------------------------------------------------
883  # here the methods to define the python includes for each step or
884  # conditions
885  #----------------------------------------------------------------------------
886  def define_Configs(self):
887  if len(self.stepMap):
888  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
889  if self._options.particleTable not in defaultOptions.particleTableList:
890  print 'Invalid particle table provided. Options are:'
891  print defaultOptions.particleTable
892  sys.exit(-1)
893  else:
894  if len(self.stepMap):
895  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
896 
897  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
898 
899  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
900  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
901  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
902  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
903  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
904  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
905  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
906  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
907  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
908  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
909  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
910  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
911  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
912  self.EIDefaultCFF=None
913  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
914  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
915  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
916  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
917  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
918  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
919  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
920  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
921  if self._options.useCondDBv1:
922  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
923  else:
924  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_condDBv2_cff"
925  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
926  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
927 
928  if "DATAMIX" in self.stepMap.keys():
929  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
930  if self._options.datamix == 'PreMix':
931  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
932  else:
933  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
934  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
935  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
936 
937  if "DIGIPREMIX" in self.stepMap.keys():
938  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
939 
940  self.ALCADefaultSeq=None
941  self.LHEDefaultSeq='externalLHEProducer'
942  self.GENDefaultSeq='pgen'
943  self.SIMDefaultSeq='psim'
944  self.DIGIDefaultSeq='pdigi'
945  self.DIGIPREMIXDefaultSeq='pdigi'
946  self.DIGIPREMIX_S2DefaultSeq='pdigi'
947  self.DATAMIXDefaultSeq=None
948  self.DIGI2RAWDefaultSeq='DigiToRaw'
949  self.HLTDefaultSeq='GRun'
950  self.L1DefaultSeq=None
951  self.L1REPACKDefaultSeq='GT'
952  self.HARVESTINGDefaultSeq=None
953  self.ALCAHARVESTDefaultSeq=None
954  self.CFWRITERDefaultSeq=None
955  self.RAW2DIGIDefaultSeq='RawToDigi'
956  self.L1RecoDefaultSeq='L1Reco'
957  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
958  if 'RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap:
959  self.RECODefaultSeq='reconstruction'
960  else:
961  self.RECODefaultSeq='reconstruction_fromRECO'
962 
963  self.EIDefaultSeq='top'
964  self.POSTRECODefaultSeq=None
965  self.L1HwValDefaultSeq='L1HwVal'
966  self.DQMDefaultSeq='DQMOffline'
967  self.VALIDATIONDefaultSeq=''
968  self.ENDJOBDefaultSeq='endOfProcess'
969  self.REPACKDefaultSeq='DigiToRawRepack'
970  self.PATDefaultSeq='miniAOD'
971 
972  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
973 
974  if not self._options.beamspot:
975  self._options.beamspot=VtxSmearedDefaultKey
976 
977  # if its MC then change the raw2digi
978  if self._options.isMC==True:
979  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
980  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
981  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
982  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
983  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
984  else:
985  self._options.beamspot = None
986 
987  #patch for gen, due to backward incompatibility
988  if 'reGEN' in self.stepMap:
989  self.GENDefaultSeq='fixGenInfo'
990 
991  if self._options.scenario=='cosmics':
992  self._options.pileup='Cosmics'
993  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
994  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
995  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
996  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
997  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
998  if self._options.isMC==True:
999  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1000  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1001  self.RECODefaultSeq='reconstructionCosmics'
1002  self.DQMDefaultSeq='DQMOfflineCosmics'
1003 
1004  if self._options.scenario=='HeavyIons':
1005  if not self._options.beamspot:
1006  self._options.beamspot=VtxSmearedHIDefaultKey
1007  self.HLTDefaultSeq = 'HIon'
1008  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1009  self.VALIDATIONDefaultSeq=''
1010  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1011  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1012  self.RECODefaultSeq='reconstructionHeavyIons'
1013  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1014  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1015  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1016  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1017  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1018  if self._options.isMC==True:
1019  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1020 
1021 
1022  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1023 
1024  self.USERDefaultSeq='user'
1025  self.USERDefaultCFF=None
1026 
1027  # the magnetic field
1028  if self._options.isData:
1029  if self._options.magField==defaultOptions.magField:
1030  print "magnetic field option forced to: AutoFromDBCurrent"
1031  self._options.magField='AutoFromDBCurrent'
1032  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1033  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1034 
1035  # the geometry
1036  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1037  self.geometryDBLabel=None
1038  simGeometry=''
1039  if self._options.fast:
1040  if 'start' in self._options.conditions.lower():
1041  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1042  else:
1043  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1044  else:
1045  def inGeometryKeys(opt):
1046  from Configuration.StandardSequences.GeometryConf import GeometryConf
1047  if opt in GeometryConf:
1048  return GeometryConf[opt]
1049  else:
1050  return opt
1051 
1052  geoms=self._options.geometry.split(',')
1053  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1054  if len(geoms)==2:
1055  #may specify the reco geometry
1056  if '/' in geoms[1] or '_cff' in geoms[1]:
1057  self.GeometryCFF=geoms[1]
1058  else:
1059  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1060 
1061  if (geoms[0].startswith('DB:')):
1062  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1063  self.geometryDBLabel=geoms[0][3:]
1064  print "with DB:"
1065  else:
1066  if '/' in geoms[0] or '_cff' in geoms[0]:
1067  self.SimGeometryCFF=geoms[0]
1068  else:
1069  simGeometry=geoms[0]
1070  if self._options.gflash==True:
1071  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1072  else:
1073  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1074 
1075  # synchronize the geometry configuration and the FullSimulation sequence to be used
1076  if simGeometry not in defaultOptions.geometryExtendedOptions:
1077  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1078 
1079  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1080  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1081  self._options.beamspot='NoSmear'
1082 
1083  # if fastsim switch event content
1084  if self._options.fast:
1085  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1086  self.SIMDefaultSeq = 'psim'
1087  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1088  self.RECODefaultSeq= 'reconstruction'
1089  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1090  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1091  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1092  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1093  self.DIGIDefaultCFF = 'FastSimulation.Configuration.Digi_cff'
1094  if self._options.datamix == 'PreMix':
1095  self.DIGIDefaultCFF="FastSimulation.Configuration.DigiDMPreMix_cff"
1096  if "DIGIPREMIX" in self.stepMap.keys():
1097  self.DIGIDefaultCFF="FastSimulation.Configuration.Digi_PreMix_cff"
1098  if "DATAMIX" in self.stepMap.keys():
1099  self.DATAMIXDefaultCFF="FastSimulation.Configuration.DataMixer"+self._options.datamix+"_cff"
1100 
1101  self.DIGIDefaultSeq = 'pdigi'
1102  self.L1EMDefaultCFF='FastSimulation.Configuration.SimL1Emulator_cff'
1103  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1104  self.DIGI2RAWDefaultCFF = 'FastSimulation.Configuration.DigiToRaw_cff'
1105  self.DIGI2RAWDefaultSeq = 'DigiToRaw'
1106  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1107  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1108 
1109 
1110 
1111  # Mixing
1112  if self._options.pileup=='default':
1113  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1114  self._options.pileup=MixingDefaultKey
1115  # temporary, until digi-reco mixing becomes standard in RelVals
1116  if self._options.fast:
1117  self._options.pileup="GEN_" + MixingDefaultKey
1118 
1119 
1120  #not driven by a default cff anymore
1121  if self._options.isData:
1122  self._options.pileup=None
1123 
1124  if self._options.slhc:
1125  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1126  if 'stdgeom' not in self._options.slhc:
1127  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1128  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1129  if self._options.pileup!=defaultOptions.pileup:
1130  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1131 
1132  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1133 
1134  # for alca, skims, etc
1135  def addExtraStream(self,name,stream,workflow='full'):
1136  # define output module and go from there
1137  output = cms.OutputModule("PoolOutputModule")
1138  if stream.selectEvents.parameters_().__len__()!=0:
1139  output.SelectEvents = stream.selectEvents
1140  else:
1141  output.SelectEvents = cms.untracked.PSet()
1142  output.SelectEvents.SelectEvents=cms.vstring()
1143  if isinstance(stream.paths,tuple):
1144  for path in stream.paths:
1145  output.SelectEvents.SelectEvents.append(path.label())
1146  else:
1147  output.SelectEvents.SelectEvents.append(stream.paths.label())
1148 
1149 
1150 
1151  if isinstance(stream.content,str):
1152  evtPset=getattr(self.process,stream.content)
1153  for p in evtPset.parameters_():
1154  setattr(output,p,getattr(evtPset,p))
1155  if not self._options.inlineEventContent:
1156  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1157  return label
1158  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1159  else:
1160  output.outputCommands = stream.content
1161 
1162 
1163  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1164 
1165  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1166  filterName = cms.untracked.string(stream.name))
1167 
1168  if self._options.filtername:
1169  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1170 
1171  #add an automatic flushing to limit memory consumption
1172  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1173 
1174  if workflow in ("producers,full"):
1175  if isinstance(stream.paths,tuple):
1176  for path in stream.paths:
1177  self.schedule.append(path)
1178  else:
1179  self.schedule.append(stream.paths)
1180 
1181 
1182  # in case of relvals we don't want to have additional outputs
1183  if (not self._options.relval) and workflow in ("full","output"):
1184  self.additionalOutputs[name] = output
1185  setattr(self.process,name,output)
1186 
1187  if workflow == 'output':
1188  # adjust the select events to the proper trigger results from previous process
1189  filterList = output.SelectEvents.SelectEvents
1190  for i, filter in enumerate(filterList):
1191  filterList[i] = filter+":"+self._options.triggerResultsProcess
1192 
1193  return output
1194 
1195  #----------------------------------------------------------------------------
1196  # here the methods to create the steps. Of course we are doing magic here ;)
1197  # prepare_STEPNAME modifies self.process and what else's needed.
1198  #----------------------------------------------------------------------------
1199 
1200  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1201  if ( len(sequence.split('.'))==1 ):
1202  l=self.loadAndRemember(defaultCFF,unsch)
1203  elif ( len(sequence.split('.'))==2 ):
1204  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1205  sequence=sequence.split('.')[1]
1206  else:
1207  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1208  print sequence,"not recognized"
1209  raise
1210  return l
1211 
1212  def scheduleSequence(self,seq,prefix,what='Path'):
1213  if '*' in seq:
1214  #create only one path with all sequences in it
1215  for i,s in enumerate(seq.split('*')):
1216  if i==0:
1217  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1218  else:
1219  p=getattr(self.process,prefix)
1220  p+=getattr(self.process, s)
1221  self.schedule.append(getattr(self.process,prefix))
1222  return
1223  else:
1224  #create as many path as many sequences
1225  if not '+' in seq:
1226  if self.nextScheduleIsConditional:
1227  self.conditionalPaths.append(prefix)
1228  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1229  self.schedule.append(getattr(self.process,prefix))
1230  else:
1231  for i,s in enumerate(seq.split('+')):
1232  sn=prefix+'%d'%(i)
1233  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1234  self.schedule.append(getattr(self.process,sn))
1235  return
1236 
1237  def scheduleSequenceAtEnd(self,seq,prefix):
1238  self.scheduleSequence(seq,prefix,what='EndPath')
1239  return
1240 
1241  def prepare_ALCAPRODUCER(self, sequence = None):
1242  self.prepare_ALCA(sequence, workflow = "producers")
1243 
1244  def prepare_ALCAOUTPUT(self, sequence = None):
1245  self.prepare_ALCA(sequence, workflow = "output")
1246 
1247  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1248  """ Enrich the process with alca streams """
1249  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1250  sequence = sequence.split('.')[-1]
1251 
1252  # decide which ALCA paths to use
1253  alcaList = sequence.split("+")
1254  maxLevel=0
1255  from Configuration.AlCa.autoAlca import autoAlca
1256  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1257  self.expandMapping(alcaList,autoAlca)
1258 
1259  for name in alcaConfig.__dict__:
1260  alcastream = getattr(alcaConfig,name)
1261  shortName = name.replace('ALCARECOStream','')
1262  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1263  output = self.addExtraStream(name,alcastream, workflow = workflow)
1264  if 'DQM' in alcaList:
1265  if not self._options.inlineEventContent and hasattr(self.process,name):
1266  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1267  else:
1268  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1269 
1270  #rename the HLT process name in the alca modules
1271  if self._options.hltProcess or 'HLT' in self.stepMap:
1272  if isinstance(alcastream.paths,tuple):
1273  for path in alcastream.paths:
1274  self.renameHLTprocessInSequence(path.label())
1275  else:
1276  self.renameHLTprocessInSequence(alcastream.paths.label())
1277 
1278  for i in range(alcaList.count(shortName)):
1279  alcaList.remove(shortName)
1280 
1281  # DQM needs a special handling
1282  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1283  path = getattr(alcaConfig,name)
1284  self.schedule.append(path)
1285  alcaList.remove('DQM')
1286 
1287  if isinstance(alcastream,cms.Path):
1288  #black list the alca path so that they do not appear in the cfg
1289  self.blacklist_paths.append(alcastream)
1290 
1291 
1292  if len(alcaList) != 0:
1293  available=[]
1294  for name in alcaConfig.__dict__:
1295  alcastream = getattr(alcaConfig,name)
1296  if isinstance(alcastream,cms.FilteredStream):
1297  available.append(name.replace('ALCARECOStream',''))
1298  print "The following alcas could not be found "+str(alcaList)
1299  print "available ",available
1300  #print "verify your configuration, ignoring for now"
1301  raise Exception("The following alcas could not be found "+str(alcaList))
1302 
1303  def prepare_LHE(self, sequence = None):
1304  #load the fragment
1305  ##make it loadable
1306  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1307  print "Loading lhe fragment from",loadFragment
1308  __import__(loadFragment)
1309  self.process.load(loadFragment)
1310  ##inline the modules
1311  self._options.inlineObjets+=','+sequence
1312 
1313  getattr(self.process,sequence).nEvents = int(self._options.number)
1314 
1315  #schedule it
1316  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1317  self.excludedPaths.append("lhe_step")
1318  self.schedule.append( self.process.lhe_step )
1319 
1320  def prepare_GEN(self, sequence = None):
1321  """ load the fragment of generator configuration """
1322  loadFailure=False
1323  #remove trailing .py
1324  #support old style .cfi by changing into something.cfi into something_cfi
1325  #remove python/ from the name
1326  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1327  #standard location of fragments
1328  if not '/' in loadFragment:
1329  loadFragment='Configuration.Generator.'+loadFragment
1330  else:
1331  loadFragment=loadFragment.replace('/','.')
1332  try:
1333  print "Loading generator fragment from",loadFragment
1334  __import__(loadFragment)
1335  except:
1336  loadFailure=True
1337  #if self.process.source and self.process.source.type_()=='EmptySource':
1338  if not (self._options.filein or self._options.dasquery):
1339  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1340 
1341  if not loadFailure:
1342  generatorModule=sys.modules[loadFragment]
1343  genModules=generatorModule.__dict__
1344  #remove lhe producer module since this should have been
1345  #imported instead in the LHE step
1346  if self.LHEDefaultSeq in genModules:
1347  del genModules[self.LHEDefaultSeq]
1348 
1349  if self._options.hideGen:
1350  self.loadAndRemember(loadFragment)
1351  else:
1352  self.process.load(loadFragment)
1353  # expose the objects from that fragment to the configuration
1354  import FWCore.ParameterSet.Modules as cmstypes
1355  for name in genModules:
1356  theObject = getattr(generatorModule,name)
1357  if isinstance(theObject, cmstypes._Module):
1358  self._options.inlineObjets=name+','+self._options.inlineObjets
1359  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1360  self._options.inlineObjets+=','+name
1361 
1362  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1363  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1364  self.productionFilterSequence = 'ProductionFilterSequence'
1365  elif 'generator' in genModules:
1366  self.productionFilterSequence = 'generator'
1367 
1368  """ Enrich the schedule with the rest of the generation step """
1369  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1370  genSeqName=sequence.split('.')[-1]
1371 
1372  if True:
1373  try:
1374  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1375  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1376  self.loadAndRemember(cffToBeLoaded)
1377  except ImportError:
1378  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1379 
1380  if self._options.scenario == 'HeavyIons':
1381  if self._options.pileup=='HiMixGEN':
1382  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1383  else:
1384  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1385 
1386  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1387  self.schedule.append(self.process.generation_step)
1388 
1389  #register to the genstepfilter the name of the path (static right now, but might evolve)
1390  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1391 
1392  if 'reGEN' in self.stepMap:
1393  #stop here
1394  return
1395 
1396  """ Enrich the schedule with the summary of the filter step """
1397  #the gen filter in the endpath
1398  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1399  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1400  return
1401 
1402  def prepare_SIM(self, sequence = None):
1403  """ Enrich the schedule with the simulation step"""
1404  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1405  if not self._options.fast:
1406  if self._options.gflash==True:
1407  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1408 
1409  if self._options.magField=='0T':
1410  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1411  else:
1412  if self._options.magField=='0T':
1413  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1414 
1415  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1416  return
1417 
1418  def prepare_DIGI(self, sequence = None):
1419  """ Enrich the schedule with the digitisation step"""
1420  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1421 
1422  if self._options.gflash==True:
1423  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1424 
1425  if sequence == 'pdigi_valid':
1426  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1427 
1428  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1429  if self._options.inputEventContent=='':
1430  self._options.inputEventContent='REGEN'
1431  else:
1432  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1433 
1434 
1435  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1436  return
1437 
1438  def prepare_DIGIPREMIX(self, sequence = None):
1439  """ Enrich the schedule with the digitisation step"""
1440  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1441 
1442  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1443  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1444 
1445  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1446  return
1447 
1448  def prepare_DIGIPREMIX_S2(self, sequence = None):
1449  """ Enrich the schedule with the digitisation step"""
1450  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1451 
1452  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1453 
1454 
1455  if sequence == 'pdigi_valid':
1456  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1457  else:
1458  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1459 
1460  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1461  return
1462 
1463  def prepare_CFWRITER(self, sequence = None):
1464  """ Enrich the schedule with the crossing frame writer step"""
1465  self.loadAndRemember(self.CFWRITERDefaultCFF)
1466  self.scheduleSequence('pcfw','cfwriter_step')
1467  return
1468 
1469  def prepare_DATAMIX(self, sequence = None):
1470  """ Enrich the schedule with the digitisation step"""
1471  self.loadAndRemember(self.DATAMIXDefaultCFF)
1472  self.scheduleSequence('pdatamix','datamixing_step')
1473 
1474  if self._options.pileup_input:
1475  theFiles=''
1476  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1477  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
1478  elif self._options.pileup_input.startswith("filelist:"):
1479  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1480  else:
1481  theFiles=self._options.pileup_input.split(',')
1482  #print theFiles
1483  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1484 
1485  return
1486 
1487  def prepare_DIGI2RAW(self, sequence = None):
1488  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1489  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1490  if "DIGIPREMIX" in self.stepMap.keys():
1491  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1492  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1493 
1494  return
1495 
1496  def prepare_REPACK(self, sequence = None):
1497  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1498  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1499  return
1500 
1501  def prepare_L1(self, sequence = None):
1502  """ Enrich the schedule with the L1 simulation step"""
1503  assert(sequence == None)
1504  self.loadAndRemember(self.L1EMDefaultCFF)
1505  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1506  return
1507 
1508  def prepare_L1REPACK(self, sequence = None):
1509  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1510  supported = ['GT','GT1','GT2','GCTGT']
1511  if sequence in supported:
1512  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1513  if self._options.scenario == 'HeavyIons':
1514  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1515  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1516  else:
1517  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1518  raise Exception('unsupported feature')
1519 
1520 
1521  def prepare_HLT(self, sequence = None):
1522  """ Enrich the schedule with the HLT simulation step"""
1523  if not sequence:
1524  print "no specification of the hlt menu has been given, should never happen"
1525  raise Exception('no HLT sequence provided')
1526 
1527  if '@' in sequence:
1528  # case where HLT:@something was provided
1529  from Configuration.HLT.autoHLT import autoHLT
1530  key = sequence[1:]
1531  if key in autoHLT:
1532  sequence = autoHLT[key]
1533  else:
1534  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1535 
1536  if ',' in sequence:
1537  #case where HLT:something:something was provided
1538  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1539  optionsForHLT = {}
1540  if self._options.scenario == 'HeavyIons':
1541  optionsForHLT['type'] = 'HIon'
1542  else:
1543  optionsForHLT['type'] = 'GRun'
1544  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1545  if sequence == 'run,fromSource':
1546  if hasattr(self.process.source,'firstRun'):
1547  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1548  elif hasattr(self.process.source,'setRunNumber'):
1549  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1550  else:
1551  raise Exception('Cannot replace menu to load %s'%(sequence))
1552  else:
1553  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1554  else:
1555  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1556 
1557  if self._options.isMC:
1558  if self._options.fast:
1559  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1560  else:
1561  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1562 
1563  if self._options.name != 'HLT':
1564  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1565  self.additionalCommands.append('process = ProcessName(process)')
1566  self.additionalCommands.append('')
1567  from HLTrigger.Configuration.CustomConfigs import ProcessName
1568  self.process = ProcessName(self.process)
1569 
1570  self.schedule.append(self.process.HLTSchedule)
1571  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1572 
1573  #this is a fake, to be removed with fastim migration and HLT menu dump
1574  if self._options.fast:
1575  if not hasattr(self.process,'HLTEndSequence'):
1576  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1577 
1578 
1579  def prepare_RAW2RECO(self, sequence = None):
1580  if ','in sequence:
1581  seqReco=sequence.split(',')[1]
1582  seqDigi=sequence.split(',')[0]
1583  else:
1584  print "RAW2RECO requires two specifications",sequence,"insufficient"
1585 
1586  self.prepare_RAW2DIGI(seqDigi)
1587  self.prepare_RECO(seqReco)
1588  return
1589 
1590  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1591  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1592  self.scheduleSequence(sequence,'raw2digi_step')
1593  # if self._options.isRepacked:
1594  #self.renameInputTagsInSequence(sequence)
1595  return
1596 
1597  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1598  ''' Enrich the schedule with L1 HW validation '''
1599  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1600  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1601  print '\n\n\n DEPRECATED this has no action \n\n\n'
1602  return
1603 
1604  def prepare_L1Reco(self, sequence = "L1Reco"):
1605  ''' Enrich the schedule with L1 reconstruction '''
1606  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1607  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1608  return
1609 
1610  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1611  ''' Enrich the schedule with L1 reconstruction '''
1613  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1614  return
1615 
1616  def prepare_FILTER(self, sequence = None):
1617  ''' Enrich the schedule with a user defined filter sequence '''
1618  ## load the relevant part
1619  filterConfig=self.load(sequence.split('.')[0])
1620  filterSeq=sequence.split('.')[-1]
1621  ## print it in the configuration
1623  def __init__(self):
1624  self.inliner=''
1625  pass
1626  def enter(self,visitee):
1627  try:
1628  label=visitee.label()
1629  ##needs to be in reverse order
1630  self.inliner=label+','+self.inliner
1631  except:
1632  pass
1633  def leave(self,v): pass
1634 
1635  expander=PrintAllModules()
1636  getattr(self.process,filterSeq).visit( expander )
1637  self._options.inlineObjets+=','+expander.inliner
1638  self._options.inlineObjets+=','+filterSeq
1639 
1640  ## put the filtering path in the schedule
1641  self.scheduleSequence(filterSeq,'filtering_step')
1642  self.nextScheduleIsConditional=True
1643  ## put it before all the other paths
1644  self.productionFilterSequence = filterSeq
1645 
1646  return
1647 
1648  def prepare_RECO(self, sequence = "reconstruction"):
1649  ''' Enrich the schedule with reconstruction '''
1650  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1651  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1652  return
1653 
1654  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1655  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1656  if not self._options.fast:
1657  print "ERROR: this step is only implemented for FastSim"
1658  sys.exit()
1659  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1660  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1661  return
1662 
1663  def prepare_PAT(self, sequence = "miniAOD"):
1664  ''' Enrich the schedule with PAT '''
1665  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1666  if not self._options.runUnscheduled:
1667  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1668  if self._options.isData:
1669  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1670  else:
1671  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1672  if self._options.fast:
1673  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1674  return
1675 
1676  def prepare_EI(self, sequence = None):
1677  ''' Enrich the schedule with event interpretation '''
1678  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1679  if sequence in EventInterpretation:
1680  self.EIDefaultCFF = EventInterpretation[sequence]
1681  sequence = 'EIsequence'
1682  else:
1683  raise Exception('Cannot set %s event interpretation'%( sequence) )
1684  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1685  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1686  return
1687 
1688  def prepare_SKIM(self, sequence = "all"):
1689  ''' Enrich the schedule with skimming fragments'''
1690  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1691  sequence = sequence.split('.')[-1]
1692 
1693  skimlist=sequence.split('+')
1694  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1695  from Configuration.Skimming.autoSkim import autoSkim
1696  self.expandMapping(skimlist,autoSkim)
1697 
1698  #print "dictionnary for skims:",skimConfig.__dict__
1699  for skim in skimConfig.__dict__:
1700  skimstream = getattr(skimConfig,skim)
1701  if isinstance(skimstream,cms.Path):
1702  #black list the alca path so that they do not appear in the cfg
1703  self.blacklist_paths.append(skimstream)
1704  if (not isinstance(skimstream,cms.FilteredStream)):
1705  continue
1706  shortname = skim.replace('SKIMStream','')
1707  if (sequence=="all"):
1708  self.addExtraStream(skim,skimstream)
1709  elif (shortname in skimlist):
1710  self.addExtraStream(skim,skimstream)
1711  #add a DQM eventcontent for this guy
1712  if self._options.datatier=='DQM':
1713  self.process.load(self.EVTCONTDefaultCFF)
1714  skimstreamDQM = cms.FilteredStream(
1715  responsible = skimstream.responsible,
1716  name = skimstream.name+'DQM',
1717  paths = skimstream.paths,
1718  selectEvents = skimstream.selectEvents,
1719  content = self._options.datatier+'EventContent',
1720  dataTier = cms.untracked.string(self._options.datatier)
1721  )
1722  self.addExtraStream(skim+'DQM',skimstreamDQM)
1723  for i in range(skimlist.count(shortname)):
1724  skimlist.remove(shortname)
1725 
1726 
1727 
1728  if (skimlist.__len__()!=0 and sequence!="all"):
1729  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1730  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1731 
1732  def prepare_USER(self, sequence = None):
1733  ''' Enrich the schedule with a user defined sequence '''
1734  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1735  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1736  return
1737 
1738  def prepare_POSTRECO(self, sequence = None):
1739  """ Enrich the schedule with the postreco step """
1740  self.loadAndRemember(self.POSTRECODefaultCFF)
1741  self.scheduleSequence('postreco_generator','postreco_step')
1742  return
1743 
1744 
1745  def prepare_VALIDATION(self, sequence = 'validation'):
1746  print sequence,"in preparing validation"
1747  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1748  from Validation.Configuration.autoValidation import autoValidation
1749  #in case VALIDATION:something:somethingelse -> something,somethingelse
1750  sequence=sequence.split('.')[-1]
1751  if sequence.find(',')!=-1:
1752  prevalSeqName=sequence.split(',')[0].split('+')
1753  valSeqName=sequence.split(',')[1].split('+')
1754  self.expandMapping(prevalSeqName,autoValidation,index=0)
1755  self.expandMapping(valSeqName,autoValidation,index=1)
1756  else:
1757  if '@' in sequence:
1758  prevalSeqName=sequence.split('+')
1759  valSeqName=sequence.split('+')
1760  self.expandMapping(prevalSeqName,autoValidation,index=0)
1761  self.expandMapping(valSeqName,autoValidation,index=1)
1762  else:
1763  postfix=''
1764  if sequence:
1765  postfix='_'+sequence
1766  prevalSeqName=['prevalidation'+postfix]
1767  valSeqName=['validation'+postfix]
1768  if not hasattr(self.process,valSeqName[0]):
1769  prevalSeqName=['']
1770  valSeqName=[sequence]
1771 
1772  def NFI(index):
1773  ##name from index, required to keep backward compatibility
1774  if index==0:
1775  return ''
1776  else:
1777  return '%s'%index
1778 
1779  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1780  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1781  self._options.restoreRNDSeeds=True
1782 
1783  #rename the HLT process in validation steps
1784  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1785  for s in valSeqName+prevalSeqName:
1786  if s:
1788  for (i,s) in enumerate(prevalSeqName):
1789  if s:
1790  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1791  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1792 
1793  for (i,s) in enumerate(valSeqName):
1794  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1795  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1796 
1797  if not 'DIGI' in self.stepMap and not self._options.fast:
1798  self.executeAndRemember("process.mix.playback = True")
1799  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1800  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1801  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1802 
1803  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1804  #will get in the schedule, smoothly
1805  for (i,s) in enumerate(valSeqName):
1806  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1807 
1808  return
1809 
1810 
1812  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1813  It will climb down within PSets, VPSets and VInputTags to find its target"""
1814  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1815  self._paramReplace = paramReplace
1816  self._paramSearch = paramSearch
1817  self._verbose = verbose
1818  self._whitelist = whitelist
1819 
1820  def doIt(self,pset,base):
1821  if isinstance(pset, cms._Parameterizable):
1822  for name in pset.parameters_().keys():
1823  # skip whitelisted parameters
1824  if name in self._whitelist:
1825  continue
1826  # if I use pset.parameters_().items() I get copies of the parameter values
1827  # so I can't modify the nested pset
1828  value = getattr(pset,name)
1829  type = value.pythonTypeName()
1830  if type in ('cms.PSet', 'cms.untracked.PSet'):
1831  self.doIt(value,base+"."+name)
1832  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1833  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1834  elif type in ('cms.string', 'cms.untracked.string'):
1835  if value.value() == self._paramSearch:
1836  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1837  setattr(pset, name,self._paramReplace)
1838  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1839  for (i,n) in enumerate(value):
1840  if not isinstance(n, cms.InputTag):
1841  n=cms.InputTag(n)
1842  if n.processName == self._paramSearch:
1843  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1844  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1845  setattr(n,"processName",self._paramReplace)
1846  value[i]=n
1847  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1848  for (i,n) in enumerate(value):
1849  if n==self._paramSearch:
1850  getattr(pset,name)[i]=self._paramReplace
1851  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1852  if value.processName == self._paramSearch:
1853  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1854  setattr(getattr(pset, name),"processName",self._paramReplace)
1855 
1856  def enter(self,visitee):
1857  label = ''
1858  try:
1859  label = visitee.label()
1860  except AttributeError:
1861  label = '<Module not in a Process>'
1862  except:
1863  label = 'other execption'
1864  self.doIt(visitee, label)
1865 
1866  def leave(self,visitee):
1867  pass
1868 
1869  #visit a sequence to repalce all input tags
1870  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1871  print "Replacing all InputTag %s => %s"%(oldT,newT)
1872  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1873  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1874  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1875  if not loadMe in self.additionalCommands:
1876  self.additionalCommands.append(loadMe)
1877  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1878 
1879  #change the process name used to address HLT results in any sequence
1880  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1881  if self._options.hltProcess:
1882  proc=self._options.hltProcess
1883  else:
1884  proc=self.process.name_()
1885  if proc==HLTprocess: return
1886  # look up all module in dqm sequence
1887  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1888  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1889  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1890  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1891  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1892 
1893 
1894  def expandMapping(self,seqList,mapping,index=None):
1895  maxLevel=20
1896  level=0
1897  while '@' in repr(seqList) and level<maxLevel:
1898  level+=1
1899  for specifiedCommand in seqList:
1900  if specifiedCommand.startswith('@'):
1901  location=specifiedCommand[1:]
1902  if not location in mapping:
1903  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1904  mappedTo=mapping[location]
1905  if index!=None:
1906  mappedTo=mappedTo[index]
1907  seqList.remove(specifiedCommand)
1908  seqList.extend(mappedTo.split('+'))
1909  break;
1910  if level==maxLevel:
1911  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1912 
1913  def prepare_DQM(self, sequence = 'DQMOffline'):
1914  # this one needs replacement
1915 
1916  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1917  sequenceList=sequence.split('.')[-1].split('+')
1918  from DQMOffline.Configuration.autoDQM import autoDQM
1919  self.expandMapping(sequenceList,autoDQM,index=0)
1920 
1921  if len(set(sequenceList))!=len(sequenceList):
1922  sequenceList=list(set(sequenceList))
1923  print "Duplicate entries for DQM:, using",sequenceList
1924  pathName='dqmoffline_step'
1925 
1926  for (i,sequence) in enumerate(sequenceList):
1927  if (i!=0):
1928  pathName='dqmoffline_%d_step'%(i)
1929 
1930  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1931  self.renameHLTprocessInSequence(sequence)
1932 
1933  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1934  if 'HLT' in self.stepMap.keys():
1935  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1936  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1937  else:
1938  # schedule DQM as a standard Path
1939  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1940  self.schedule.append(getattr(self.process,pathName))
1941 
1942 
1943  def prepare_HARVESTING(self, sequence = None):
1944  """ Enrich the process with harvesting step """
1945  self.EDMtoMECFF='Configuration/StandardSequences/EDMtoME'+self._options.harvesting+'_cff'
1946  self.loadAndRemember(self.EDMtoMECFF)
1947  self.scheduleSequence('EDMtoME','edmtome_step')
1948 
1949  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1950  sequence = sequence.split('.')[-1]
1951 
1952  # decide which HARVESTING paths to use
1953  harvestingList = sequence.split("+")
1954  from DQMOffline.Configuration.autoDQM import autoDQM
1955  from Validation.Configuration.autoValidation import autoValidation
1956  import copy
1957  combined_mapping = copy.deepcopy( autoDQM )
1958  combined_mapping.update( autoValidation )
1959  self.expandMapping(harvestingList,combined_mapping,index=-1)
1960 
1961  if len(set(harvestingList))!=len(harvestingList):
1962  harvestingList=list(set(harvestingList))
1963  print "Duplicate entries for HARVESTING, using",harvestingList
1964 
1965  for name in harvestingList:
1966  if not name in harvestingConfig.__dict__:
1967  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1968  continue
1969  harvestingstream = getattr(harvestingConfig,name)
1970  if isinstance(harvestingstream,cms.Path):
1971  self.schedule.append(harvestingstream)
1972  self.blacklist_paths.append(harvestingstream)
1973  if isinstance(harvestingstream,cms.Sequence):
1974  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1975  self.schedule.append(getattr(self.process,name+"_step"))
1976 
1977  self.scheduleSequence('DQMSaver','dqmsave_step')
1978  return
1979 
1980  def prepare_ALCAHARVEST(self, sequence = None):
1981  """ Enrich the process with AlCaHarvesting step """
1982  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
1983  sequence=sequence.split(".")[-1]
1984 
1985  # decide which AlcaHARVESTING paths to use
1986  harvestingList = sequence.split("+")
1987 
1988 
1989 
1990  from Configuration.AlCa.autoPCL import autoPCL
1991  self.expandMapping(harvestingList,autoPCL)
1992 
1993  for name in harvestingConfig.__dict__:
1994  harvestingstream = getattr(harvestingConfig,name)
1995  if name in harvestingList and isinstance(harvestingstream,cms.Path):
1996  self.schedule.append(harvestingstream)
1997  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
1998  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
1999  harvestingList.remove(name)
2000  # append the common part at the end of the sequence
2001  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2002  self.schedule.append(lastStep)
2003 
2004  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2005  print "The following harvesting could not be found : ", harvestingList
2006  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2007 
2008 
2009 
2010  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2011  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2012  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2013  return
2014 
2016  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2017  self.schedule.append(self.process.reconstruction)
2018 
2019 
2020  def build_production_info(self, evt_type, evtnumber):
2021  """ Add useful info for the production. """
2022  self.process.configurationMetadata=cms.untracked.PSet\
2023  (version=cms.untracked.string("$Revision: 1.19 $"),
2024  name=cms.untracked.string("Applications"),
2025  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2026  )
2027 
2028  self.addedObjects.append(("Production Info","configurationMetadata"))
2029 
2030 
2031  def prepare(self, doChecking = False):
2032  """ Prepare the configuration string and add missing pieces."""
2033 
2034  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2035  self.addMaxEvents()
2036  if self.with_input:
2037  self.addSource()
2038  self.addStandardSequences()
2039  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2040  self.completeInputCommand()
2041  self.addConditions()
2042 
2043 
2044  outputModuleCfgCode=""
2045  if not 'HARVESTING' in self.stepMap.keys() and not 'SKIM' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2046  outputModuleCfgCode=self.addOutput()
2047 
2048  self.addCommon()
2049 
2050  self.pythonCfgCode = "# Auto generated configuration file\n"
2051  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2052  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2053  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2054  if hasattr(self._options,"era") and self._options.era :
2055  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2056  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2057  # Multiple eras can be specified in a comma seperated list
2058  for requestedEra in self._options.era.split(",") :
2059  self.pythonCfgCode += ",eras."+requestedEra
2060  self.pythonCfgCode += ")\n\n" # end of the line
2061  else :
2062  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2063 
2064  self.pythonCfgCode += "# import of standard configurations\n"
2065  for module in self.imports:
2066  self.pythonCfgCode += ("process.load('"+module+"')\n")
2067 
2068  # production info
2069  if not hasattr(self.process,"configurationMetadata"):
2070  self.build_production_info(self._options.evt_type, self._options.number)
2071  else:
2072  #the PSet was added via a load
2073  self.addedObjects.append(("Production Info","configurationMetadata"))
2074 
2075  self.pythonCfgCode +="\n"
2076  for comment,object in self.addedObjects:
2077  if comment!="":
2078  self.pythonCfgCode += "\n# "+comment+"\n"
2079  self.pythonCfgCode += dumpPython(self.process,object)
2080 
2081  # dump the output definition
2082  self.pythonCfgCode += "\n# Output definition\n"
2083  self.pythonCfgCode += outputModuleCfgCode
2084 
2085  # dump all additional outputs (e.g. alca or skim streams)
2086  self.pythonCfgCode += "\n# Additional output definition\n"
2087  #I do not understand why the keys are not normally ordered.
2088  nl=self.additionalOutputs.keys()
2089  nl.sort()
2090  for name in nl:
2091  output = self.additionalOutputs[name]
2092  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2093  tmpOut = cms.EndPath(output)
2094  setattr(self.process,name+'OutPath',tmpOut)
2095  self.schedule.append(tmpOut)
2096 
2097  # dump all additional commands
2098  self.pythonCfgCode += "\n# Other statements\n"
2099  for command in self.additionalCommands:
2100  self.pythonCfgCode += command + "\n"
2101 
2102  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2103  for object in self._options.inlineObjets.split(','):
2104  if not object:
2105  continue
2106  if not hasattr(self.process,object):
2107  print 'cannot inline -'+object+'- : not known'
2108  else:
2109  self.pythonCfgCode +='\n'
2110  self.pythonCfgCode +=dumpPython(self.process,object)
2111 
2112  # dump all paths
2113  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2114  for path in self.process.paths:
2115  if getattr(self.process,path) not in self.blacklist_paths:
2116  self.pythonCfgCode += dumpPython(self.process,path)
2117 
2118  for endpath in self.process.endpaths:
2119  if getattr(self.process,endpath) not in self.blacklist_paths:
2120  self.pythonCfgCode += dumpPython(self.process,endpath)
2121 
2122  # dump the schedule
2123  if not self._options.runUnscheduled:
2124  self.pythonCfgCode += "\n# Schedule definition\n"
2125  result = "process.schedule = cms.Schedule("
2126 
2127  # handling of the schedule
2128  self.process.schedule = cms.Schedule()
2129  for item in self.schedule:
2130  if not isinstance(item, cms.Schedule):
2131  self.process.schedule.append(item)
2132  else:
2133  self.process.schedule.extend(item)
2134 
2135  if hasattr(self.process,"HLTSchedule"):
2136  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2137  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2138  pathNames = ['process.'+p.label_() for p in beforeHLT]
2139  result += ','.join(pathNames)+')\n'
2140  result += 'process.schedule.extend(process.HLTSchedule)\n'
2141  pathNames = ['process.'+p.label_() for p in afterHLT]
2142  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2143  else:
2144  pathNames = ['process.'+p.label_() for p in self.schedule]
2145  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2146 
2147  self.pythonCfgCode += result
2148 
2149  if self._options.nThreads is not "1":
2150  self.pythonCfgCode +="\n"
2151  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2152  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2153  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2154  #repacked version
2155  if self._options.isRepacked:
2156  self.pythonCfgCode +="\n"
2157  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2158  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2159  MassReplaceInputTag(self.process)
2160 
2161  # special treatment in case of production filter sequence 2/2
2162  if self.productionFilterSequence:
2163  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2164  self.pythonCfgCode +='for path in process.paths:\n'
2165  if len(self.conditionalPaths):
2166  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2167  if len(self.excludedPaths):
2168  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2169  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2170  pfs = getattr(self.process,self.productionFilterSequence)
2171  for path in self.process.paths:
2172  if not path in self.conditionalPaths: continue
2173  if path in self.excludedPaths: continue
2174  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2175 
2176 
2177  # dump customise fragment
2178  self.pythonCfgCode += self.addCustomise()
2179 
2180  if self._options.runUnscheduled:
2181  # prune and delete paths
2182  #this is not supporting the blacklist at this point since I do not understand it
2183  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2184  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2185 
2186  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2187 
2188  #now add the unscheduled stuff
2189  for module in self.importsUnsch:
2190  self.process.load(module)
2191  self.pythonCfgCode += ("process.load('"+module+"')\n")
2192 
2193  self.pythonCfgCode += self.addCustomise(1)
2194 
2195 
2196  # make the .io file
2197 
2198  if self._options.io:
2199  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2200  if not self._options.io.endswith('.io'): self._option.io+='.io'
2201  io=open(self._options.io,'w')
2202  ioJson={}
2203  if hasattr(self.process.source,"fileNames"):
2204  if len(self.process.source.fileNames.value()):
2205  ioJson['primary']=self.process.source.fileNames.value()
2206  if hasattr(self.process.source,"secondaryFileNames"):
2207  if len(self.process.source.secondaryFileNames.value()):
2208  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2209  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2210  ioJson['pileup']=self._options.pileup_input[4:]
2211  for (o,om) in self.process.outputModules_().items():
2212  ioJson[o]=om.fileName.value()
2213  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2214  if self.productionFilterSequence:
2215  ioJson['filter']=self.productionFilterSequence
2216  import json
2217  io.write(json.dumps(ioJson))
2218  return
2219 
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:34
assert(m_qm.get())
def visit
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_RECO
put the filtering path in the schedule
tuple zip
Definition: archive.py:476
def massSearchReplaceAnyInputTag
Definition: helpers.py:262
def defineMixing
Definition: Mixing.py:167
inliner
needs to be in reverse order
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
list object
Definition: dbtoconf.py:77
double split
Definition: MVATrainer.cc:139
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run