ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 import FWCore.ParameterSet.DictTypes as DictTypes
12 class Options:
13  pass
14 
15 # the canonical defaults
16 defaultOptions = Options()
17 defaultOptions.datamix = 'DataOnSim'
18 defaultOptions.isMC=False
19 defaultOptions.isData=True
20 defaultOptions.step=''
21 defaultOptions.pileup='NoPileUp'
22 defaultOptions.pileup_input = None
23 defaultOptions.geometry = 'SimDB'
24 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
25 defaultOptions.magField = '38T'
26 defaultOptions.conditions = None
27 defaultOptions.useCondDBv1 = False
28 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
29 defaultOptions.harvesting= 'AtRunEnd'
30 defaultOptions.gflash = False
31 defaultOptions.number = -1
32 defaultOptions.number_out = None
33 defaultOptions.arguments = ""
34 defaultOptions.name = "NO NAME GIVEN"
35 defaultOptions.evt_type = ""
36 defaultOptions.filein = ""
37 defaultOptions.dasquery=""
38 defaultOptions.secondfilein = ""
39 defaultOptions.customisation_file = []
40 defaultOptions.customisation_file_unsch = []
41 defaultOptions.customise_commands = ""
42 defaultOptions.inline_custom=False
43 defaultOptions.particleTable = 'pythiapdt'
44 defaultOptions.particleTableList = ['pythiapdt','pdt']
45 defaultOptions.dirin = ''
46 defaultOptions.dirout = ''
47 defaultOptions.filetype = 'EDM'
48 defaultOptions.fileout = 'output.root'
49 defaultOptions.filtername = ''
50 defaultOptions.lazy_download = False
51 defaultOptions.custom_conditions = ''
52 defaultOptions.hltProcess = ''
53 defaultOptions.eventcontent = None
54 defaultOptions.datatier = None
55 defaultOptions.inlineEventContent = True
56 defaultOptions.inlineObjets =''
57 defaultOptions.hideGen=False
58 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
59 defaultOptions.beamspot=None
60 defaultOptions.outputDefinition =''
61 defaultOptions.inputCommands = None
62 defaultOptions.outputCommands = None
63 defaultOptions.inputEventContent = ''
64 defaultOptions.dropDescendant = False
65 defaultOptions.relval = None
66 defaultOptions.slhc = None
67 defaultOptions.profile = None
68 defaultOptions.isRepacked = False
69 defaultOptions.restoreRNDSeeds = False
70 defaultOptions.donotDropOnInput = ''
71 defaultOptions.python_filename =''
72 defaultOptions.io=None
73 defaultOptions.lumiToProcess=None
74 defaultOptions.fast=False
75 defaultOptions.runsAndWeightsForMC = None
76 defaultOptions.runsScenarioForMC = None
77 defaultOptions.runUnscheduled = False
78 defaultOptions.timeoutOutput = False
79 defaultOptions.nThreads = '1'
80 
81 # some helper routines
82 def dumpPython(process,name):
83  theObject = getattr(process,name)
84  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
85  return "process."+name+" = " + theObject.dumpPython("process")
86  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
87  return "process."+name+" = " + theObject.dumpPython()+"\n"
88  else:
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90 def filesFromList(fileName,s=None):
91  import os
92  import FWCore.ParameterSet.Config as cms
93  prim=[]
94  sec=[]
95  for line in open(fileName,'r'):
96  if line.count(".root")>=2:
97  #two files solution...
98  entries=line.replace("\n","").split()
99  if not entries[0] in prim:
100  prim.append(entries[0])
101  if not entries[1] in sec:
102  sec.append(entries[1])
103  elif (line.find(".root")!=-1):
104  entry=line.replace("\n","")
105  if not entry in prim:
106  prim.append(entry)
107  if s:
108  if not hasattr(s,"fileNames"):
109  s.fileNames=cms.untracked.vstring(prim)
110  else:
111  s.fileNames.extend(prim)
112  if len(sec)!=0:
113  if not hasattr(s,"secondaryFileNames"):
114  s.secondaryFileNames=cms.untracked.vstring(sec)
115  else:
116  s.secondaryFileNames.extend(sec)
117  print "found files: ",prim
118  if len(prim)==0:
119  raise Exception("There are no files in input from the file list")
120  if len(sec)!=0:
121  print "found parent files:",sec
122  return (prim,sec)
123 
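# --- Editor's illustrative sketch, not part of the original module ---
# filesFromList above reads a plain text list with one primary .root file per line,
# optionally followed on the same line by its parent file. A minimal usage sketch,
# assuming a hypothetical list file 'my_files.txt' containing lines such as:
#
#   /store/data/RunX/MyPD/RAW/v1/primary_1.root
#   /store/data/RunX/MyPD/RAW/v1/primary_2.root /store/data/RunX/MyPD-parent/RAW/v1/parent_2.root
#
def _example_filesFromList_usage(source):
    # fills source.fileNames (and source.secondaryFileNames when parent files are listed)
    prim, sec = filesFromList('my_files.txt', source)
    return prim, sec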
124 def filesFromDASQuery(query,s=None):
125  import os
126  import FWCore.ParameterSet.Config as cms
127  prim=[]
128  sec=[]
129  print "the query is",query
130  for line in os.popen('das_client.py --query "%s"'%(query)):
131  if line.count(".root")>=2:
132  #two files solution...
133  entries=line.replace("\n","").split()
134  if not entries[0] in prim:
135  prim.append(entries[0])
136  if not entries[1] in sec:
137  sec.append(entries[1])
138  elif (line.find(".root")!=-1):
139  entry=line.replace("\n","")
140  if not entry in prim:
141  prim.append(entry)
142  if s:
143  if not hasattr(s,"fileNames"):
144  s.fileNames=cms.untracked.vstring(prim)
145  else:
146  s.fileNames.extend(prim)
147  if len(sec)!=0:
148  if not hasattr(s,"secondaryFileNames"):
149  s.secondaryFileNames=cms.untracked.vstring(sec)
150  else:
151  s.secondaryFileNames.extend(sec)
152  print "found files: ",prim
153  if len(sec)!=0:
154  print "found parent files:",sec
155  return (prim,sec)
156 
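# Editor's note, illustrative only: filesFromDASQuery shells out to das_client.py, so the
# query string follows DAS syntax; a hedged usage sketch with a hypothetical dataset name:
#   prim, sec = filesFromDASQuery('file dataset = /SomePrimary/SomeEra-SomeProc/TIER', source)
# The returned lines are split into primary/parent files exactly as in filesFromList above.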
157 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
158  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
159  for s in aProcess.paths_().keys():
160  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
161 
162 def anyOf(listOfKeys,dict,opt=None):
163  for k in listOfKeys:
164  if k in dict:
165  toReturn=dict[k]
166  dict.pop(k)
167  return toReturn
168  if opt!=None:
169  return opt
170  else:
171  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
172 
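# Editor's illustrative sketch, not in the original source: anyOf pops the first matching
# alias out of an --output dict and returns its value, e.g.
#   >>> d = {'tier': 'AODSIM', 'e': 'AODSIM'}
#   >>> anyOf(['t', 'tier', 'dataTier'], d)   # returns 'AODSIM' and removes 'tier' from d
#   >>> anyOf(['x', 'y'], d, 'fallback')      # no alias present: returns the default
#   >>> anyOf(['x', 'y'], d)                  # no alias and no default: raises an Exception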
173 class ConfigBuilder(object):
174  """The main building routines """
175 
176  def __init__(self, options, process = None, with_output = False, with_input = False ):
177  """options taken from old cmsDriver and optparse """
178 
179  options.outfile_name = options.dirout+options.fileout
180 
181  self._options = options
182 
183  if self._options.isData and options.isMC:
184  raise Exception("ERROR: You may specify only --data or --mc, not both")
185  #if not self._options.conditions:
186  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
187 
188  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
189  if 'ENDJOB' in self._options.step:
190  if (hasattr(self._options,"outputDefinition") and \
191  self._options.outputDefinition != '' and \
192  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
193  (hasattr(self._options,"datatier") and \
194  self._options.datatier and \
195  'DQMIO' in self._options.datatier):
196  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
197  self._options.step=self._options.step.replace(',ENDJOB','')
198 
199 
200 
201  # what steps are provided by this class?
202  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
203  self.stepMap={}
204  self.stepKeys=[]
205  for step in self._options.step.split(","):
206  if step=='': continue
207  stepParts = step.split(":")
208  stepName = stepParts[0]
209  if stepName not in stepList and not stepName.startswith('re'):
210  raise ValueError("Step "+stepName+" unknown")
211  if len(stepParts)==1:
212  self.stepMap[stepName]=""
213  elif len(stepParts)==2:
214  self.stepMap[stepName]=stepParts[1].split('+')
215  elif len(stepParts)==3:
216  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
217  else:
218  raise ValueError("Step definition "+step+" invalid")
219  self.stepKeys.append(stepName)
220 
221  #print "map of steps is:",self.stepMap
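 # Editor's illustrative sketch, not in the original source: the loop above turns the
 # cmsDriver-style step string into self.stepMap; e.g. with hypothetical sub-sequence names,
 #   --step RAW2DIGI,RECO:seqA+seqB,DQM:myCff:seqC+seqD
 # yields
 #   stepMap['RAW2DIGI'] = ''
 #   stepMap['RECO']     = ['seqA', 'seqB']
 #   stepMap['DQM']      = (['seqC', 'seqD'], 'myCff')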
222 
223  self.with_output = with_output
224  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
225  self.with_output = False
226  self.with_input = with_input
227  if process == None:
228  self.process = cms.Process(self._options.name)
229  else:
230  self.process = process
231  self.imports = []
232  self.importsUnsch = []
233  self.define_Configs()
234  self.schedule = list()
235 
236  # we are doing three things here:
237  # creating a process to catch errors
238  # building the code to re-create the process
239 
240  self.additionalCommands = []
241  # TODO: maybe a list of to be dumped objects would help as well
242  self.blacklist_paths = []
243  self.addedObjects = []
244  self.additionalOutputs = {}
245 
246  self.productionFilterSequence = None
247  self.nextScheduleIsConditional=False
248  self.conditionalPaths=[]
249  self.excludedPaths=[]
250 
251  def profileOptions(self):
252  """
253  addIgProfService
254  Function to add the igprof profile service so that you can dump in the middle
255  of the run.
256  """
257  profileOpts = self._options.profile.split(':')
258  profilerStart = 1
259  profilerInterval = 100
260  profilerFormat = None
261  profilerJobFormat = None
262 
263  if len(profileOpts):
264  #type, given as first argument is unused here
265  profileOpts.pop(0)
266  if len(profileOpts):
267  startEvent = profileOpts.pop(0)
268  if not startEvent.isdigit():
269  raise Exception("%s is not a number" % startEvent)
270  profilerStart = int(startEvent)
271  if len(profileOpts):
272  eventInterval = profileOpts.pop(0)
273  if not eventInterval.isdigit():
274  raise Exception("%s is not a number" % eventInterval)
275  profilerInterval = int(eventInterval)
276  if len(profileOpts):
277  profilerFormat = profileOpts.pop(0)
278 
279 
280  if not profilerFormat:
281  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
282  self._options.step,
283  self._options.pileup,
284  self._options.conditions,
285  self._options.datatier,
286  self._options.profileTypeLabel)
287  if not profilerJobFormat and profilerFormat.endswith(".gz"):
288  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
289  elif not profilerJobFormat:
290  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
291 
292  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
293 
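 # Editor's illustrative sketch, not in the original source: profileOptions parses a
 # colon-separated --profile value of the form <type>[:<firstEvent>[:<interval>[:<format>]]];
 # e.g. --profile pp:101:50 starts the igprof dumps at event 101 and repeats every 50 events,
 # and when no format is given the file name pattern is built from evt_type, step, pileup,
 # conditions, datatier and profileTypeLabel as shown above.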
294  def load(self,includeFile):
295  includeFile = includeFile.replace('/','.')
296  self.process.load(includeFile)
297  return sys.modules[includeFile]
298 
299  def loadAndRemember(self, includeFile,unsch=0):
300  """helper routine to load and memorize imports"""
301  # we could make the imports a on-the-fly data method of the process instance itself
302  # not sure if the latter is a good idea
303  includeFile = includeFile.replace('/','.')
304  if unsch==0:
305  self.imports.append(includeFile)
306  self.process.load(includeFile)
307  return sys.modules[includeFile]
308  else:
309  self.importsUnsch.append(includeFile)
310  return 0#sys.modules[includeFile]
311 
312  def executeAndRemember(self, command):
313  """helper routine to remember replace statements"""
314  self.additionalCommands.append(command)
315  if not command.strip().startswith("#"):
316  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
317  import re
318  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
319  #exec(command.replace("process.","self.process."))
320 
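 # Editor's illustrative sketch, not in the original source: executeAndRemember both applies
 # a statement to the live self.process and records it so that it is replayed verbatim in the
 # dumped python configuration, e.g.
 #   self.executeAndRemember('process.source.skipEvents=cms.untracked.uint32(10)')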
321  def addCommon(self):
322  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
323  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
324  else:
325  self.process.options = cms.untracked.PSet( )
326 
327  if self._options.runUnscheduled:
328  self.process.options.allowUnscheduled=cms.untracked.bool(True)
329 
330  self.addedObjects.append(("","options"))
331 
332  if self._options.lazy_download:
333  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
334  stats = cms.untracked.bool(True),
335  enable = cms.untracked.bool(True),
336  cacheHint = cms.untracked.string("lazy-download"),
337  readHint = cms.untracked.string("read-ahead-buffered")
338  )
339  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
340 
341  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
342  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
343 
344  if self._options.profile:
345  (start, interval, eventFormat, jobFormat)=self.profileOptions()
346  self.process.IgProfService = cms.Service("IgProfService",
347  reportFirstEvent = cms.untracked.int32(start),
348  reportEventInterval = cms.untracked.int32(interval),
349  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
350  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
351  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
352 
353  def addMaxEvents(self):
354  """Here we decide how many evts will be processed"""
355  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
356  if self._options.number_out:
357  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
358  self.addedObjects.append(("","maxEvents"))
359 
360  def addSource(self):
361  """Here the source is built. Priority: file, generator"""
362  self.addedObjects.append(("Input source","source"))
363 
364  def filesFromOption(self):
365  for entry in self._options.filein.split(','):
366  print "entry",entry
367  if entry.startswith("filelist:"):
368  filesFromList(entry[9:],self.process.source)
369  elif entry.startswith("dbs:") or entry.startswith("das:"):
370  filesFromDASQuery('file dataset = %s'%(entry[4:]),self.process.source)
371  else:
372  self.process.source.fileNames.append(self._options.dirin+entry)
373  if self._options.secondfilein:
374  if not hasattr(self.process.source,"secondaryFileNames"):
375  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
376  for entry in self._options.secondfilein.split(','):
377  print "entry",entry
378  if entry.startswith("filelist:"):
379  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
380  elif entry.startswith("dbs:") or entry.startswith("das:"):
381  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:])))[0])
382  else:
383  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
384 
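 # Editor's illustrative sketch, not in the original source: the entries of --filein (and
 # --secondfilein) handled by filesFromOption above can take three forms, e.g.
 #   --filein filelist:my_files.txt                     read the names from a local list file
 #   --filein das:/SomePrimary/SomeEra-SomeProc/TIER    resolve the files via a DAS query (dbs: also accepted)
 #   --filein file1.root,file2.root                     plain comma-separated names, prefixed by --dirin
 # where the list file and dataset names are hypothetical.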
385  if self._options.filein or self._options.dasquery:
386  if self._options.filetype == "EDM":
387  self.process.source=cms.Source("PoolSource",
388  fileNames = cms.untracked.vstring(),
389  secondaryFileNames= cms.untracked.vstring())
390  filesFromOption(self)
391  elif self._options.filetype == "DAT":
392  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
393  filesFromOption(self)
394  elif self._options.filetype == "LHE":
395  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
396  if self._options.filein.startswith("lhe:"):
397  #list the article directory automatically
398  args=self._options.filein.split(':')
399  article=args[1]
400  print 'LHE input from article ',article
401  location='/store/lhe/'
402  import os
403  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
404  for line in textOfFiles:
405  for fileName in [x for x in line.split() if '.lhe' in x]:
406  self.process.source.fileNames.append(location+article+'/'+fileName)
407  if len(args)>2:
408  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
409  else:
410  filesFromOption(self)
411 
412 
413  elif self._options.filetype == "DQM":
414  self.process.source=cms.Source("DQMRootSource",
415  fileNames = cms.untracked.vstring())
416  filesFromOption(self)
417 
418  elif self._options.filetype == "DQMDAQ":
419  # FIXME: how to configure it if there are no input files specified?
420  self.process.source=cms.Source("DQMStreamerReader")
421 
422 
423  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
424  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
425 
426  if self._options.dasquery!='':
427  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
428  filesFromDASQuery(self._options.dasquery,self.process.source)
429 
430  if self._options.inputCommands:
431  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
432  for command in self._options.inputCommands.split(','):
433  # remove whitespace around the keep/drop statements
434  command = command.strip()
435  if command=='': continue
436  self.process.source.inputCommands.append(command)
437  if not self._options.dropDescendant:
438  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
439 
440  if self._options.lumiToProcess:
441  import FWCore.PythonUtilities.LumiList as LumiList
442  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
443 
444  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
445  if self.process.source is None:
446  self.process.source=cms.Source("EmptySource")
447 
448  # modify source in case of run-dependent MC
449  self.runsAndWeights=None
450  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
451  if not self._options.isMC :
452  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
453  if self._options.runsAndWeightsForMC:
454  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
455  else:
456  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
457  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
458  __import__(RunsAndWeights[self._options.runsScenarioForMC])
459  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
460  else:
461  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
462 
463  if self.runsAndWeights:
464  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
465  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
466  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
467  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
468 
469  return
470 
471  def addOutput(self):
472  """ Add output module to the process """
473  result=""
474  if self._options.outputDefinition:
475  if self._options.datatier:
476  print "--datatier & --eventcontent options ignored"
477 
478  #new output convention with a list of dict
479  outList = eval(self._options.outputDefinition)
480  for (id,outDefDict) in enumerate(outList):
481  outDefDictStr=outDefDict.__str__()
482  if not isinstance(outDefDict,dict):
483  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
484  #requires option: tier
485  theTier=anyOf(['t','tier','dataTier'],outDefDict)
486  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
487  ## event content
488  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
489  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
490  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
491  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
492  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
493  # module label has a particular role
494  if not theModuleLabel:
495  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
496  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
497  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
498  ]
499  for name in tryNames:
500  if not hasattr(self.process,name):
501  theModuleLabel=name
502  break
503  if not theModuleLabel:
504  raise Exception("cannot find a module label for specification: "+outDefDictStr)
505  if id==0:
506  defaultFileName=self._options.outfile_name
507  else:
508  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
509 
510  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
511  if not theFileName.endswith('.root'):
512  theFileName+='.root'
513 
514  if len(outDefDict.keys()):
515  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
516  if theStreamType=='DQMIO': theStreamType='DQM'
517  if theStreamType=='ALL':
518  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
519  else:
520  theEventContent = getattr(self.process, theStreamType+"EventContent")
521 
522 
523  addAlCaSelects=False
524  if theStreamType=='ALCARECO' and not theFilterName:
525  theFilterName='StreamALCACombined'
526  addAlCaSelects=True
527 
528  CppType='PoolOutputModule'
529  if self._options.timeoutOutput:
530  CppType='TimeoutPoolOutputModule'
531  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
532  output = cms.OutputModule(CppType,
533  theEventContent.clone(),
534  fileName = cms.untracked.string(theFileName),
535  dataset = cms.untracked.PSet(
536  dataTier = cms.untracked.string(theTier),
537  filterName = cms.untracked.string(theFilterName))
538  )
539  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
540  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
541  if not theSelectEvent and hasattr(self.process,'filtering_step'):
542  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
543  if theSelectEvent:
544  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
545 
546  if addAlCaSelects:
547  if not hasattr(output,'SelectEvents'):
548  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
549  for alca in self.AlCaPaths:
550  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
551 
552 
553  if hasattr(self.process,theModuleLabel):
554  raise Exception("the current process already has a module "+theModuleLabel+" defined")
555  #print "creating output module ",theModuleLabel
556  setattr(self.process,theModuleLabel,output)
557  outputModule=getattr(self.process,theModuleLabel)
558  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
559  path=getattr(self.process,theModuleLabel+'_step')
560  self.schedule.append(path)
561 
562  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
563  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
564  return label
565  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
566  if theExtraOutputCommands:
567  if not isinstance(theExtraOutputCommands,list):
568  raise Exception("extra output command in --output must be a list of strings")
569  if hasattr(self.process,theStreamType+"EventContent"):
570  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
571  else:
572  outputModule.outputCommands.extend(theExtraOutputCommands)
573 
574  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
575 
576  ##ends the --output options model
577  return result
578 
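 # Editor's illustrative sketch, not in the original source: the --output branch above expects
 # a python literal list of dicts, one per output module, e.g. with hypothetical values
 #   --output '[{"e":"AODSIM","t":"AODSIM"},{"e":"DQM","t":"DQMIO","f":"myFilter"}]'
 # where the short keys are resolved through anyOf(): t/tier/dataTier, e/ec/eventContent/streamType,
 # f/ftN/filterName, s/sE/selectEvents, l/mL/moduleLabel, o/oC/outputCommands and fn/fileName.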
579  streamTypes=self._options.eventcontent.split(',')
580  tiers=self._options.datatier.split(',')
581  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
582  raise Exception("number of event content arguments does not match number of datatier arguments")
583 
584  # if the only step is alca we don't need to put in an output
585  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
586  return "\n"
587 
588  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
589  if streamType=='': continue
590  if streamType=='DQMIO': streamType='DQM'
591  theEventContent = getattr(self.process, streamType+"EventContent")
592  if i==0:
593  theFileName=self._options.outfile_name
594  theFilterName=self._options.filtername
595  else:
596  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
597  theFilterName=self._options.filtername
598  CppType='PoolOutputModule'
599  if self._options.timeoutOutput:
600  CppType='TimeoutPoolOutputModule'
601  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
602  output = cms.OutputModule(CppType,
603  theEventContent,
604  fileName = cms.untracked.string(theFileName),
605  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
606  filterName = cms.untracked.string(theFilterName)
607  )
608  )
609  if hasattr(self.process,"generation_step") and streamType!='LHE':
610  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
611  if hasattr(self.process,"filtering_step"):
612  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
613 
614  if streamType=='ALCARECO':
615  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
616 
617  if "MINIAOD" in streamType:
618  output.dropMetaData = cms.untracked.string('ALL')
619  output.fastCloning= cms.untracked.bool(False)
620  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
621 
622  outputModuleName=streamType+'output'
623  setattr(self.process,outputModuleName,output)
624  outputModule=getattr(self.process,outputModuleName)
625  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
626  path=getattr(self.process,outputModuleName+'_step')
627  self.schedule.append(path)
628 
629  if self._options.outputCommands and streamType!='DQM':
630  for evct in self._options.outputCommands.split(','):
631  if not evct: continue
632  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
633 
634  if not self._options.inlineEventContent:
635  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
636  return label
637  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
638 
639  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
640 
641  return result
642 
643  def addStandardSequences(self):
644  """
645  Add selected standard sequences to the process
646  """
647  # load the pile up file
648  if self._options.pileup:
649  pileupSpec=self._options.pileup.split(',')[0]
650 
651  # FastSim: GEN-mixing or DIGI-RECO mixing?
652  GEN_mixing = False
653  if self._options.fast and pileupSpec.find("GEN_") == 0:
654  GEN_mixing = True
655  pileupSpec = pileupSpec[4:]
656 
657  # Does the requested pile-up scenario exist?
658  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
659  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
660  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
661  if self._options.fast:
662  message += "\n-"*20+"\n additional options for FastSim (gen-mixing):\n" + "-"*20 + "\n" + '\n'.join(["GEN_" + x for x in Mixing.keys()]) + "\n"
663  raise Exception(message)
664 
665  # Put mixing parameters in a dictionary
666  if '.' in pileupSpec:
667  mixingDict={'file':pileupSpec}
668  elif pileupSpec.startswith('file:'):
669  mixingDict={'file':pileupSpec[5:]}
670  else:
671  import copy
672  mixingDict=copy.copy(Mixing[pileupSpec])
673  if len(self._options.pileup.split(','))>1:
674  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
675 
676  # Load the pu cfg file corresponding to the requested pu scenario
677  if 'file:' in pileupSpec:
678  #the file is local
679  self.process.load(mixingDict['file'])
680  print "inlining mixing module configuration"
681  self._options.inlineObjets+=',mix'
682  else:
683  self.loadAndRemember(mixingDict['file'])
684 
685  # FastSim: transform cfg of MixingModule from FullSim to FastSim
686  if self._options.fast:
687  if GEN_mixing:
688  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareGenMixing")
689  else:
690  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareDigiRecoMixing")
691 
692  mixingDict.pop('file')
693  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
694  if self._options.pileup_input:
695  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
696  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
697  else:
698  mixingDict['F']=self._options.pileup_input.split(',')
699  specialization=defineMixing(mixingDict)
700  for command in specialization:
701  self.executeAndRemember(command)
702  if len(mixingDict)!=0:
703  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
704 
705 
706  # load the geometry file
707  try:
708  if len(self.stepMap):
709  self.loadAndRemember(self.GeometryCFF)
710  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
711  self.loadAndRemember(self.SimGeometryCFF)
712  if self.geometryDBLabel:
713  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
714  except ImportError:
715  print "Geometry option",self._options.geometry,"unknown."
716  raise
717 
718  if len(self.stepMap):
719  self.loadAndRemember(self.magFieldCFF)
720 
721  for stepName in self.stepKeys:
722  stepSpec = self.stepMap[stepName]
723  print "Step:", stepName,"Spec:",stepSpec
724  if stepName.startswith('re'):
725  ##add the corresponding input content
726  if stepName[2:] not in self._options.donotDropOnInput:
727  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
728  stepName=stepName[2:]
729  if stepSpec=="":
730  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
731  elif type(stepSpec)==list:
732  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
733  elif type(stepSpec)==tuple:
734  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
735  else:
736  raise ValueError("Invalid step definition")
737 
738  if self._options.restoreRNDSeeds!=False:
739  #it is either True, or a process name
740  if self._options.restoreRNDSeeds==True:
741  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
742  else:
743  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
744  if self._options.inputEventContent or self._options.inputCommands:
745  if self._options.inputCommands:
746  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
747  else:
748  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
749 
750 
752  if self._options.inputEventContent:
753  import copy
754  def dropSecondDropStar(iec):
755  #drop duplicate occurrences of 'drop *' in the list, keeping only the first
756  count=0
757  for item in iec:
758  if item=='drop *':
759  if count!=0:
760  iec.remove(item)
761  count+=1
762 
763 
764  ## allow comma separated input eventcontent
765  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
766  for evct in self._options.inputEventContent.split(','):
767  if evct=='': continue
768  theEventContent = getattr(self.process, evct+"EventContent")
769  if hasattr(theEventContent,'outputCommands'):
770  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
771  if hasattr(theEventContent,'inputCommands'):
772  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
773 
774  dropSecondDropStar(self.process.source.inputCommands)
775 
776  if not self._options.dropDescendant:
777  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
778 
779 
780  return
781 
782  def addConditions(self):
783  """Add conditions to the process"""
784  if not self._options.conditions: return
785 
786  if 'FrontierConditions_GlobalTag' in self._options.conditions:
787  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. Please update your command line'
788  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
789 
790  self.loadAndRemember(self.ConditionsDefaultCFF)
791 
792  if self._options.useCondDBv1:
793  from Configuration.AlCa.GlobalTag import GlobalTag
794  else:
795  from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag
796 
797  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
798 
799  if self._options.useCondDBv1:
800  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
801  else:
802  self.additionalCommands.append('from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag')
803 
804  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
805 
806  if self._options.slhc:
807  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
808 
809 
810  def addCustomise(self,unsch=0):
811  """Include the customise code """
812 
813  custOpt=[]
814  if unsch==0:
815  for c in self._options.customisation_file:
816  custOpt.extend(c.split(","))
817  else:
818  for c in self._options.customisation_file_unsch:
819  custOpt.extend(c.split(","))
820 
821  custMap=DictTypes.SortedKeysDict()
822  for opt in custOpt:
823  if opt=='': continue
824  if opt.count('.')>1:
825  raise Exception("more than one '.' in the specification: "+opt)
826  fileName=opt.split('.')[0]
827  if opt.count('.')==0: rest='customise'
828  else:
829  rest=opt.split('.')[1]
830  if rest=='py': rest='customise' #catch the case of --customise file.py
831 
832  if fileName in custMap:
833  custMap[fileName].extend(rest.split('+'))
834  else:
835  custMap[fileName]=rest.split('+')
836 
837  if len(custMap)==0:
838  final_snippet='\n'
839  else:
840  final_snippet='\n# customisation of the process.\n'
841 
842  allFcn=[]
843  for opt in custMap:
844  allFcn.extend(custMap[opt])
845  for fcn in allFcn:
846  if allFcn.count(fcn)!=1:
847  raise Exception("cannot specify twice "+fcn+" as a customisation method")
848 
849  for f in custMap:
850  # let python search for that package and do syntax checking at the same time
851  packageName = f.replace(".py","").replace("/",".")
852  __import__(packageName)
853  package = sys.modules[packageName]
854 
855  # now ask the package for its definition and pick .py instead of .pyc
856  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
857 
858  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
859  if self._options.inline_custom:
860  for line in file(customiseFile,'r'):
861  if "import FWCore.ParameterSet.Config" in line:
862  continue
863  final_snippet += line
864  else:
865  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
866  for fcn in custMap[f]:
867  print "customising the process with",fcn,"from",f
868  if not hasattr(package,fcn):
869  #bound to fail at run time
870  raise Exception("config "+f+" has no function "+fcn)
871  #execute the command
872  self.process=getattr(package,fcn)(self.process)
873  #and print it in the configuration
874  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
875  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
876 
877  if len(custMap)!=0:
878  final_snippet += '\n# End of customisation functions\n'
879 
880  ### now for a useful command
881  if unsch==1 or not self._options.runUnscheduled:
882  if self._options.customise_commands:
883  import string
884  final_snippet +='\n# Customisation from command line'
885  for com in self._options.customise_commands.split('\\n'):
886  com=string.lstrip(com)
887  self.executeAndRemember(com)
888  final_snippet +='\n'+com
889 
890  return final_snippet
891 
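 # Editor's illustrative sketch, not in the original source: each --customise entry parsed
 # above has the form Package/Module.function (several functions may be chained with '+', and
 # a bare Package/Module defaults to a function named 'customise'), e.g. with hypothetical names
 #   --customise MyPackage/MyCustoms.customiseForX+customiseForY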
892  #----------------------------------------------------------------------------
893  # here the methods to define the python includes for each step or
894  # conditions
895  #----------------------------------------------------------------------------
896  def define_Configs(self):
897  if len(self.stepMap):
898  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
899  if self._options.particleTable not in defaultOptions.particleTableList:
900  print 'Invalid particle table provided. Options are:'
901  print defaultOptions.particleTableList
902  sys.exit(-1)
903  else:
904  if len(self.stepMap):
905  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
906 
907  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
908 
909  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
910  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
911  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
912  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
913  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
914  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
915  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
916  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
917  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
918  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
919  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
920  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
921  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
922  self.EIDefaultCFF=None
923  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
924  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
925  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
926  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
927  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
928  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
929  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
930  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
931  if self._options.useCondDBv1:
932  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
933  else:
934  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_condDBv2_cff"
935  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
936  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
937 
938  if "DATAMIX" in self.stepMap.keys():
939  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
940  if self._options.datamix == 'PreMix':
941  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
942  else:
943  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
944  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
945  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
946 
947  if "DIGIPREMIX" in self.stepMap.keys():
948  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
949 
950  self.ALCADefaultSeq=None
951  self.LHEDefaultSeq='externalLHEProducer'
952  self.GENDefaultSeq='pgen'
953  self.SIMDefaultSeq='psim'
954  self.DIGIDefaultSeq='pdigi'
955  self.DIGIPREMIXDefaultSeq='pdigi'
956  self.DIGIPREMIX_S2DefaultSeq='pdigi'
957  self.DATAMIXDefaultSeq=None
958  self.DIGI2RAWDefaultSeq='DigiToRaw'
959  self.HLTDefaultSeq='GRun'
960  self.L1DefaultSeq=None
961  self.L1REPACKDefaultSeq='GT'
962  self.HARVESTINGDefaultSeq=None
963  self.ALCAHARVESTDefaultSeq=None
964  self.CFWRITERDefaultSeq=None
965  self.RAW2DIGIDefaultSeq='RawToDigi'
966  self.L1RecoDefaultSeq='L1Reco'
967  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
968  if 'RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap:
969  self.RECODefaultSeq='reconstruction'
970  else:
971  self.RECODefaultSeq='reconstruction_fromRECO'
972 
973  self.EIDefaultSeq='top'
974  self.POSTRECODefaultSeq=None
975  self.L1HwValDefaultSeq='L1HwVal'
976  self.DQMDefaultSeq='DQMOffline'
977  self.VALIDATIONDefaultSeq=''
978  self.ENDJOBDefaultSeq='endOfProcess'
979  self.REPACKDefaultSeq='DigiToRawRepack'
980  self.PATDefaultSeq='miniAOD'
981 
982  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
983 
984  if not self._options.beamspot:
985  self._options.beamspot=VtxSmearedDefaultKey
986 
987  # if its MC then change the raw2digi
988  if self._options.isMC==True:
989  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
990  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
991  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
992  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
993  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
994  else:
995  self._options.beamspot = None
996 
997  #patch for gen, due to backward incompatibility
998  if 'reGEN' in self.stepMap:
999  self.GENDefaultSeq='fixGenInfo'
1000 
1001  if self._options.scenario=='cosmics':
1002  self._options.pileup='Cosmics'
1003  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1004  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1005  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1006  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1007  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1008  if self._options.isMC==True:
1009  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1010  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1011  self.RECODefaultSeq='reconstructionCosmics'
1012  self.DQMDefaultSeq='DQMOfflineCosmics'
1013 
1014  if self._options.scenario=='HeavyIons':
1015  if not self._options.beamspot:
1016  self._options.beamspot=VtxSmearedHIDefaultKey
1017  self.HLTDefaultSeq = 'HIon'
1018  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1019  self.VALIDATIONDefaultSeq=''
1020  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1021  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1022  self.RECODefaultSeq='reconstructionHeavyIons'
1023  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1024  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1025  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1026  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1027  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1028  if self._options.isMC==True:
1029  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1030 
1031 
1032  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1033 
1034  self.USERDefaultSeq='user'
1035  self.USERDefaultCFF=None
1036 
1037  # the magnetic field
1038  if self._options.isData:
1039  if self._options.magField==defaultOptions.magField:
1040  print "magnetic field option forced to: AutoFromDBCurrent"
1041  self._options.magField='AutoFromDBCurrent'
1042  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1043  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1044 
1045  # the geometry
1046  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1047  self.geometryDBLabel=None
1048  simGeometry=''
1049  if self._options.fast:
1050  if 'start' in self._options.conditions.lower():
1051  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1052  else:
1053  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1054  else:
1055  def inGeometryKeys(opt):
1056  from Configuration.StandardSequences.GeometryConf import GeometryConf
1057  if opt in GeometryConf:
1058  return GeometryConf[opt]
1059  else:
1060  return opt
1061 
1062  geoms=self._options.geometry.split(',')
1063  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1064  if len(geoms)==2:
1065  #may specify the reco geometry
1066  if '/' in geoms[1] or '_cff' in geoms[1]:
1067  self.GeometryCFF=geoms[1]
1068  else:
1069  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1070 
1071  if (geoms[0].startswith('DB:')):
1072  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1073  self.geometryDBLabel=geoms[0][3:]
1074  print "with DB:"
1075  else:
1076  if '/' in geoms[0] or '_cff' in geoms[0]:
1077  self.SimGeometryCFF=geoms[0]
1078  else:
1079  simGeometry=geoms[0]
1080  if self._options.gflash==True:
1081  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1082  else:
1083  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1084 
1085  # synchronize the geometry configuration and the FullSimulation sequence to be used
1086  if simGeometry not in defaultOptions.geometryExtendedOptions:
1087  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1088 
1089  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1090  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1091  self._options.beamspot='NoSmear'
1092 
1093  # if fastsim switch event content
1094  if self._options.fast:
1095  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1096  self.SIMDefaultSeq = 'psim'
1097  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1098  self.RECODefaultSeq= 'reconstruction'
1099  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1100  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1101  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1102  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1103  self.DIGIDefaultCFF = 'FastSimulation.Configuration.Digi_cff'
1104  if self._options.datamix == 'PreMix':
1105  self.DIGIDefaultCFF="FastSimulation.Configuration.DigiDMPreMix_cff"
1106  if "DIGIPREMIX" in self.stepMap.keys():
1107  self.DIGIDefaultCFF="FastSimulation.Configuration.Digi_PreMix_cff"
1108  if "DATAMIX" in self.stepMap.keys():
1109  self.DATAMIXDefaultCFF="FastSimulation.Configuration.DataMixer"+self._options.datamix+"_cff"
1110 
1111  self.DIGIDefaultSeq = 'pdigi'
1112  self.L1EMDefaultCFF='FastSimulation.Configuration.SimL1Emulator_cff'
1113  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1114  self.DIGI2RAWDefaultCFF = 'FastSimulation.Configuration.DigiToRaw_cff'
1115  self.DIGI2RAWDefaultSeq = 'DigiToRaw'
1116  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1117  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1118 
1119 
1120 
1121  # Mixing
1122  if self._options.pileup=='default':
1123  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1124  self._options.pileup=MixingDefaultKey
1125  # temporary, until digi-reco mixing becomes standard in RelVals
1126  if self._options.fast:
1127  self._options.pileup="GEN_" + MixingDefaultKey
1128 
1129 
1130  #not driven by a default cff anymore
1131  if self._options.isData:
1132  self._options.pileup=None
1133 
1134  if self._options.slhc:
1135  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1136  if 'stdgeom' not in self._options.slhc:
1137  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1138  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1139  if self._options.pileup!=defaultOptions.pileup:
1140  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1141 
1142  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1143 
1144  # for alca, skims, etc
1145  def addExtraStream(self,name,stream,workflow='full'):
1146  # define output module and go from there
1147  output = cms.OutputModule("PoolOutputModule")
1148  if stream.selectEvents.parameters_().__len__()!=0:
1149  output.SelectEvents = stream.selectEvents
1150  else:
1151  output.SelectEvents = cms.untracked.PSet()
1152  output.SelectEvents.SelectEvents=cms.vstring()
1153  if isinstance(stream.paths,tuple):
1154  for path in stream.paths:
1155  output.SelectEvents.SelectEvents.append(path.label())
1156  else:
1157  output.SelectEvents.SelectEvents.append(stream.paths.label())
1158 
1159 
1160 
1161  if isinstance(stream.content,str):
1162  evtPset=getattr(self.process,stream.content)
1163  for p in evtPset.parameters_():
1164  setattr(output,p,getattr(evtPset,p))
1165  if not self._options.inlineEventContent:
1166  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1167  return label
1168  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1169  else:
1170  output.outputCommands = stream.content
1171 
1172 
1173  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1174 
1175  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1176  filterName = cms.untracked.string(stream.name))
1177 
1178  if self._options.filtername:
1179  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1180 
1181  #add an automatic flushing to limit memory consumption
1182  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1183 
1184  if workflow in ("producers","full"):
1185  if isinstance(stream.paths,tuple):
1186  for path in stream.paths:
1187  self.schedule.append(path)
1188  else:
1189  self.schedule.append(stream.paths)
1190 
1191 
1192  # in case of relvals we don't want to have additional outputs
1193  if (not self._options.relval) and workflow in ("full","output"):
1194  self.additionalOutputs[name] = output
1195  setattr(self.process,name,output)
1196 
1197  if workflow == 'output':
1198  # adjust the select events to the proper trigger results from previous process
1199  filterList = output.SelectEvents.SelectEvents
1200  for i, filter in enumerate(filterList):
1201  filterList[i] = filter+":"+self._options.triggerResultsProcess
1202 
1203  return output
1204 
1205  #----------------------------------------------------------------------------
1206  # here the methods to create the steps. Of course we are doing magic here ;)
1207  # prepare_STEPNAME modifies self.process and what else's needed.
1208  #----------------------------------------------------------------------------
1209 
1210  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1211  if ( len(sequence.split('.'))==1 ):
1212  l=self.loadAndRemember(defaultCFF,unsch)
1213  elif ( len(sequence.split('.'))==2 ):
1214  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1215  sequence=sequence.split('.')[1]
1216  else:
1217  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1218  print sequence,"not recognized"
1219  raise
1220  return l
1221 
1222  def scheduleSequence(self,seq,prefix,what='Path'):
1223  if '*' in seq:
1224  #create only one path with all sequences in it
1225  for i,s in enumerate(seq.split('*')):
1226  if i==0:
1227  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1228  else:
1229  p=getattr(self.process,prefix)
1230  p+=getattr(self.process, s)
1231  self.schedule.append(getattr(self.process,prefix))
1232  return
1233  else:
1234  #create as many path as many sequences
1235  if not '+' in seq:
1236  if self.nextScheduleIsConditional:
1237  self.conditionalPaths.append(prefix)
1238  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1239  self.schedule.append(getattr(self.process,prefix))
1240  else:
1241  for i,s in enumerate(seq.split('+')):
1242  sn=prefix+'%d'%(i)
1243  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1244  self.schedule.append(getattr(self.process,sn))
1245  return
1246 
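 # Editor's illustrative sketch, not in the original source: for hypothetical sequences seqA
 # and seqB already attached to the process,
 #   scheduleSequence('seqA*seqB','foo_step')  creates one path, process.foo_step, containing both
 #   scheduleSequence('seqA+seqB','foo_step')  creates two paths, process.foo_step0 and process.foo_step1
 # and every created path is appended to self.schedule.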
1247  def scheduleSequenceAtEnd(self,seq,prefix):
1248  self.scheduleSequence(seq,prefix,what='EndPath')
1249  return
1250 
1251  def prepare_ALCAPRODUCER(self, sequence = None):
1252  self.prepare_ALCA(sequence, workflow = "producers")
1253 
1254  def prepare_ALCAOUTPUT(self, sequence = None):
1255  self.prepare_ALCA(sequence, workflow = "output")
1256 
1257  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1258  """ Enrich the process with alca streams """
1259  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1260  sequence = sequence.split('.')[-1]
1261 
1262  # decide which ALCA paths to use
1263  alcaList = sequence.split("+")
1264  maxLevel=0
1265  from Configuration.AlCa.autoAlca import autoAlca
1266  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1267  self.expandMapping(alcaList,autoAlca)
1268  self.AlCaPaths=[]
1269  for name in alcaConfig.__dict__:
1270  alcastream = getattr(alcaConfig,name)
1271  shortName = name.replace('ALCARECOStream','')
1272  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1273  output = self.addExtraStream(name,alcastream, workflow = workflow)
1274  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1275  self.AlCaPaths.append(shortName)
1276  if 'DQM' in alcaList:
1277  if not self._options.inlineEventContent and hasattr(self.process,name):
1278  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1279  else:
1280  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1281 
1282  #rename the HLT process name in the alca modules
1283  if self._options.hltProcess or 'HLT' in self.stepMap:
1284  if isinstance(alcastream.paths,tuple):
1285  for path in alcastream.paths:
1286  self.renameHLTprocessInSequence(path.label())
1287  else:
1288  self.renameHLTprocessInSequence(alcastream.paths.label())
1289 
1290  for i in range(alcaList.count(shortName)):
1291  alcaList.remove(shortName)
1292 
1293  # DQM needs a special handling
1294  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1295  path = getattr(alcaConfig,name)
1296  self.schedule.append(path)
1297  alcaList.remove('DQM')
1298 
1299  if isinstance(alcastream,cms.Path):
1300  #blacklist the alca path so that it does not appear in the cfg
1301  self.blacklist_paths.append(alcastream)
1302 
1303 
1304  if len(alcaList) != 0:
1305  available=[]
1306  for name in alcaConfig.__dict__:
1307  alcastream = getattr(alcaConfig,name)
1308  if isinstance(alcastream,cms.FilteredStream):
1309  available.append(name.replace('ALCARECOStream',''))
1310  print "The following alcas could not be found "+str(alcaList)
1311  print "available ",available
1312  #print "verify your configuration, ignoring for now"
1313  raise Exception("The following alcas could not be found "+str(alcaList))
1314 
1315  def prepare_LHE(self, sequence = None):
1316  #load the fragment
1317  ##make it loadable
1318  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1319  print "Loading lhe fragment from",loadFragment
1320  __import__(loadFragment)
1321  self.process.load(loadFragment)
1322  ##inline the modules
1323  self._options.inlineObjets+=','+sequence
1324 
1325  getattr(self.process,sequence).nEvents = int(self._options.number)
1326 
1327  #schedule it
1328  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1329  self.excludedPaths.append("lhe_step")
1330  self.schedule.append( self.process.lhe_step )
1331 
1332  def prepare_GEN(self, sequence = None):
1333  """ load the fragment of generator configuration """
1334  loadFailure=False
1335  #remove trailing .py
1336  #support old style .cfi by changing into something.cfi into something_cfi
1337  #remove python/ from the name
1338  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1339  #standard location of fragments
1340  if not '/' in loadFragment:
1341  loadFragment='Configuration.Generator.'+loadFragment
1342  else:
1343  loadFragment=loadFragment.replace('/','.')
1344  try:
1345  print "Loading generator fragment from",loadFragment
1346  __import__(loadFragment)
1347  except:
1348  loadFailure=True
1349  #if self.process.source and self.process.source.type_()=='EmptySource':
1350  if not (self._options.filein or self._options.dasquery):
1351  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1352 
1353  if not loadFailure:
1354  generatorModule=sys.modules[loadFragment]
1355  genModules=generatorModule.__dict__
1356  #remove lhe producer module since this should have been
1357  #imported instead in the LHE step
1358  if self.LHEDefaultSeq in genModules:
1359  del genModules[self.LHEDefaultSeq]
1360 
1361  if self._options.hideGen:
1362  self.loadAndRemember(loadFragment)
1363  else:
1364  self.process.load(loadFragment)
1365  # expose the objects from that fragment to the configuration
1366  import FWCore.ParameterSet.Modules as cmstypes
1367  for name in genModules:
1368  theObject = getattr(generatorModule,name)
1369  if isinstance(theObject, cmstypes._Module):
1370  self._options.inlineObjets=name+','+self._options.inlineObjets
1371  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1372  self._options.inlineObjets+=','+name
1373 
1374  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1375  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1376  self.productionFilterSequence = 'ProductionFilterSequence'
1377  elif 'generator' in genModules:
1378  self.productionFilterSequence = 'generator'
1379 
1380  """ Enrich the schedule with the rest of the generation step """
1381  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1382  genSeqName=sequence.split('.')[-1]
1383 
1384  if True:
1385  try:
1386  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1387  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1388  self.loadAndRemember(cffToBeLoaded)
1389  except ImportError:
1390  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1391 
1392  if self._options.scenario == 'HeavyIons':
1393  if self._options.pileup=='HiMixGEN':
1394  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1395  else:
1396  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1397 
1398  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1399  self.schedule.append(self.process.generation_step)
1400 
1401  #register to the genstepfilter the name of the path (static right now, but might evolve)
1402  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1403 
1404  if 'reGEN' in self.stepMap:
1405  #stop here
1406  return
1407 
1408  """ Enrich the schedule with the summary of the filter step """
1409  #the gen filter in the endpath
1410  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1411  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1412  return
1413 
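# Illustrative sketch (hypothetical fragment name): prepare_GEN()/prepare_LHE() turn the
# --evt_type argument into an importable module path by stripping '.py', mapping '.'
# to '_', dropping 'python/', and defaulting to the Configuration.Generator package
# when no '/' is present.
#
#     evt_type = 'SingleMuPt10_cfi.py'
#     frag = evt_type.replace('.py', '').replace('.', '_').replace('python/', '')
#     if not '/' in frag:
#         frag = 'Configuration.Generator.' + frag
#     else:
#         frag = frag.replace('/', '.')
#     # frag == 'Configuration.Generator.SingleMuPt10_cfi'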
1414  def prepare_SIM(self, sequence = None):
1415  """ Enrich the schedule with the simulation step"""
1416  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1417  if not self._options.fast:
1418  if self._options.gflash==True:
1419  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1420 
1421  if self._options.magField=='0T':
1422  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1423  else:
1424  if self._options.magField=='0T':
1425  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1426 
1427  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1428  return
1429 
1430  def prepare_DIGI(self, sequence = None):
1431  """ Enrich the schedule with the digitisation step"""
1432  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1433 
1434  if self._options.gflash==True:
1435  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1436 
1437  if sequence == 'pdigi_valid':
1438  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1439 
1440  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1441  if self._options.inputEventContent=='':
1442  self._options.inputEventContent='REGEN'
1443  else:
1444  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1445 
1446 
1447  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1448  return
1449 
1450  def prepare_DIGIPREMIX(self, sequence = None):
1451  """ Enrich the schedule with the digitisation step"""
1452  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1453 
1454  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1455  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1456 
1457  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1458  return
1459 
1460  def prepare_DIGIPREMIX_S2(self, sequence = None):
1461  """ Enrich the schedule with the digitisation step"""
1462  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1463 
1464  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1465 
1466 
1467  if sequence == 'pdigi_valid':
1468  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1469  else:
1470  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1471 
1472  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1473  return
1474 
1475  def prepare_CFWRITER(self, sequence = None):
1476  """ Enrich the schedule with the crossing frame writer step"""
1477  self.loadAndRemember(self.CFWRITERDefaultCFF)
1478  self.scheduleSequence('pcfw','cfwriter_step')
1479  return
1480 
1481  def prepare_DATAMIX(self, sequence = None):
1482  """ Enrich the schedule with the digitisation step"""
1483  self.loadAndRemember(self.DATAMIXDefaultCFF)
1484  self.scheduleSequence('pdatamix','datamixing_step')
1485 
1486  if self._options.pileup_input:
1487  theFiles=''
1488  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1489  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
1490  elif self._options.pileup_input.startswith("filelist:"):
1491  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1492  else:
1493  theFiles=self._options.pileup_input.split(',')
1494  #print theFiles
1495  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1496 
1497  return
1498 
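# Illustrative sketch (hypothetical file names): the pileup_input option handled above
# accepts a 'dbs:'/'das:' dataset query, a 'filelist:' text file, or a plain
# comma-separated list of LFNs; filesFromDASQuery/filesFromList are the helpers
# already used in this file.
#
#     pileup_input = '/store/relval/fake1.root,/store/relval/fake2.root'
#     if pileup_input.startswith('dbs:') or pileup_input.startswith('das:'):
#         files = filesFromDASQuery('file dataset = %s' % pileup_input[4:])[0]
#     elif pileup_input.startswith('filelist:'):
#         files = filesFromList(pileup_input[9:])[0]
#     else:
#         files = pileup_input.split(',')
#     # files == ['/store/relval/fake1.root', '/store/relval/fake2.root']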
1499  def prepare_DIGI2RAW(self, sequence = None):
1500  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1501  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1502  if "DIGIPREMIX" in self.stepMap.keys():
1503  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1504  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1505 
1506  return
1507 
1508  def prepare_REPACK(self, sequence = None):
1509  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1510  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1511  return
1512 
1513  def prepare_L1(self, sequence = None):
1514  """ Enrich the schedule with the L1 simulation step"""
1515  assert(sequence == None)
1516  self.loadAndRemember(self.L1EMDefaultCFF)
1517  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1518  return
1519 
1520  def prepare_L1REPACK(self, sequence = None):
1521  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1522  supported = ['GT','GT1','GT2','GCTGT']
1523  if sequence in supported:
1524  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1525  if self._options.scenario == 'HeavyIons':
1526  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1527  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1528  else:
1529  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1530  raise Exception('unsupported feature')
1531 
1532 
1533  def prepare_HLT(self, sequence = None):
1534  """ Enrich the schedule with the HLT simulation step"""
1535  if not sequence:
1536  print "no specification of the hlt menu has been given, should never happen"
1537  raise Exception('no HLT sequence provided')
1538 
1539  if '@' in sequence:
1540  # case where HLT:@something was provided
1541  from Configuration.HLT.autoHLT import autoHLT
1542  key = sequence[1:]
1543  if key in autoHLT:
1544  sequence = autoHLT[key]
1545  else:
1546  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1547 
1548  if ',' in sequence:
1549  #case where HLT:something:something was provided
1550  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1551  optionsForHLT = {}
1552  if self._options.scenario == 'HeavyIons':
1553  optionsForHLT['type'] = 'HIon'
1554  else:
1555  optionsForHLT['type'] = 'GRun'
1556  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1557  if sequence == 'run,fromSource':
1558  if hasattr(self.process.source,'firstRun'):
1559  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1560  elif hasattr(self.process.source,'setRunNumber'):
1561  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1562  else:
1563  raise Exception('Cannot replace menu to load %s'%(sequence))
1564  else:
1565  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1566  else:
1567  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1568 
1569  if self._options.isMC:
1570  if self._options.fast:
1571  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1572  else:
1573  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1574 
1575  if self._options.name != 'HLT':
1576  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1577  self.additionalCommands.append('process = ProcessName(process)')
1578  self.additionalCommands.append('')
1579  from HLTrigger.Configuration.CustomConfigs import ProcessName
1580  self.process = ProcessName(self.process)
1581 
1582  self.schedule.append(self.process.HLTSchedule)
1583  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1584 
1585  #this is a fake, to be removed with the fastsim migration and HLT menu dump
1586  if self._options.fast:
1587  if not hasattr(self.process,'HLTEndSequence'):
1588  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1589 
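# Illustrative sketch (hypothetical mapping content): prepare_HLT() above resolves
# "HLT:@<key>" through autoHLT and then loads HLTrigger/Configuration/HLT_<menu>_cff.
#
#     autoHLT = {'relval': 'GRun'}          # hypothetical content of the mapping
#     sequence = '@relval'
#     if '@' in sequence:
#         key = sequence[1:]
#         if key in autoHLT:
#             sequence = autoHLT[key]
#         else:
#             raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
#     # would then load 'HLTrigger/Configuration/HLT_GRun_cff'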
1590 
1591  def prepare_RAW2RECO(self, sequence = None):
1592  if ',' in sequence:
1593  seqReco=sequence.split(',')[1]
1594  seqDigi=sequence.split(',')[0]
1595  else:
1596  raise Exception("RAW2RECO requires two comma-separated specifications, '"+str(sequence)+"' is insufficient")
1597 
1598  self.prepare_RAW2DIGI(seqDigi)
1599  self.prepare_RECO(seqReco)
1600  return
1601 
1602  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1603  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1604  self.scheduleSequence(sequence,'raw2digi_step')
1605  # if self._options.isRepacked:
1606  #self.renameInputTagsInSequence(sequence)
1607  return
1608 
1609  def prepare_PATFILTER(self, sequence=None):
1610  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1611  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1612  for filt in allMetFilterPaths:
1613  self.schedule.append(getattr(self.process,'Flag_'+filt))
1614 
1615  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1616  ''' Enrich the schedule with L1 HW validation '''
1617  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1618  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1619  print '\n\n\n DEPRECATED this has no action \n\n\n'
1620  return
1621 
1622  def prepare_L1Reco(self, sequence = "L1Reco"):
1623  ''' Enrich the schedule with L1 reconstruction '''
1624  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1625  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1626  return
1627 
1628  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1629  ''' Enrich the schedule with the L1 track trigger '''
1630  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1631  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1632  return
1633 
1634  def prepare_FILTER(self, sequence = None):
1635  ''' Enrich the schedule with a user defined filter sequence '''
1636  ## load the relevant part
1637  filterConfig=self.load(sequence.split('.')[0])
1638  filterSeq=sequence.split('.')[-1]
1639  ## print it in the configuration
1640  class PrintAllModules(object):
1641  def __init__(self):
1642  self.inliner=''
1643  pass
1644  def enter(self,visitee):
1645  try:
1646  label=visitee.label()
1647  ##needs to be in reverse order
1648  self.inliner=label+','+self.inliner
1649  except:
1650  pass
1651  def leave(self,v): pass
1652 
1653  expander=PrintAllModules()
1654  getattr(self.process,filterSeq).visit( expander )
1655  self._options.inlineObjets+=','+expander.inliner
1656  self._options.inlineObjets+=','+filterSeq
1657 
1658  ## put the filtering path in the schedule
1659  self.scheduleSequence(filterSeq,'filtering_step')
1660  self.nextScheduleIsConditional=True
1661  ## put it before all the other paths
1662  self.productionFilterSequence = filterSeq
1663 
1664  return
1665 
1666  def prepare_RECO(self, sequence = "reconstruction"):
1667  ''' Enrich the schedule with reconstruction '''
1668  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1669  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1670  return
1671 
1672  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1673  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1674  if not self._options.fast:
1675  print "ERROR: this step is only implemented for FastSim"
1676  sys.exit()
1677  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1678  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1679  return
1680 
1681  def prepare_PAT(self, sequence = "miniAOD"):
1682  ''' Enrich the schedule with PAT '''
1683  self.prepare_PATFILTER(self)
1684  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1685  if not self._options.runUnscheduled:
1686  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1687  if self._options.isData:
1688  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1689  else:
1690  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1691  if self._options.fast:
1692  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1693  return
1694 
1695  def prepare_EI(self, sequence = None):
1696  ''' Enrich the schedule with event interpretation '''
1697  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1698  if sequence in EventInterpretation:
1699  self.EIDefaultCFF = EventInterpretation[sequence]
1700  sequence = 'EIsequence'
1701  else:
1702  raise Exception('Cannot set %s event interpretation'%( sequence) )
1703  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1704  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1705  return
1706 
1707  def prepare_SKIM(self, sequence = "all"):
1708  ''' Enrich the schedule with skimming fragments'''
1709  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1710  sequence = sequence.split('.')[-1]
1711 
1712  skimlist=sequence.split('+')
1713  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1714  from Configuration.Skimming.autoSkim import autoSkim
1715  self.expandMapping(skimlist,autoSkim)
1716 
1717  #print "dictionnary for skims:",skimConfig.__dict__
1718  for skim in skimConfig.__dict__:
1719  skimstream = getattr(skimConfig,skim)
1720  if isinstance(skimstream,cms.Path):
1721  #blacklist the skim paths so that they do not appear in the cfg
1722  self.blacklist_paths.append(skimstream)
1723  if (not isinstance(skimstream,cms.FilteredStream)):
1724  continue
1725  shortname = skim.replace('SKIMStream','')
1726  if (sequence=="all"):
1727  self.addExtraStream(skim,skimstream)
1728  elif (shortname in skimlist):
1729  self.addExtraStream(skim,skimstream)
1730  #add a DQM eventcontent for this guy
1731  if self._options.datatier=='DQM':
1732  self.process.load(self.EVTCONTDefaultCFF)
1733  skimstreamDQM = cms.FilteredStream(
1734  responsible = skimstream.responsible,
1735  name = skimstream.name+'DQM',
1736  paths = skimstream.paths,
1737  selectEvents = skimstream.selectEvents,
1738  content = self._options.datatier+'EventContent',
1739  dataTier = cms.untracked.string(self._options.datatier)
1740  )
1741  self.addExtraStream(skim+'DQM',skimstreamDQM)
1742  for i in range(skimlist.count(shortname)):
1743  skimlist.remove(shortname)
1744 
1745 
1746 
1747  if (len(skimlist)!=0 and sequence!="all"):
1748  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1749  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1750 
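# Illustrative sketch (hypothetical names): for a skim shipped as a cms.FilteredStream,
# the block above clones it into a companion '<name>DQM' stream carrying the DQM event
# content; building such a stream by hand uses the same fields as the clone above.
#
#     import FWCore.ParameterSet.Config as cms
#     demoPath = cms.Path()                  # hypothetical skim path
#     demoStreamDQM = cms.FilteredStream(
#         responsible = 'nobody',
#         name = 'DemoSkimDQM',
#         paths = (demoPath,),
#         selectEvents = cms.untracked.PSet(),
#         content = 'DQMEventContent',
#         dataTier = cms.untracked.string('DQM')
#     )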
1751  def prepare_USER(self, sequence = None):
1752  ''' Enrich the schedule with a user defined sequence '''
1753  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1754  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1755  return
1756 
1757  def prepare_POSTRECO(self, sequence = None):
1758  """ Enrich the schedule with the postreco step """
1759  self.loadAndRemember(self.POSTRECODefaultCFF)
1760  self.scheduleSequence('postreco_generator','postreco_step')
1761  return
1762 
1763 
1764  def prepare_VALIDATION(self, sequence = 'validation'):
1765  print sequence,"in preparing validation"
1766  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1767  from Validation.Configuration.autoValidation import autoValidation
1768  #in case VALIDATION:something:somethingelse -> something,somethingelse
1769  sequence=sequence.split('.')[-1]
1770  if sequence.find(',')!=-1:
1771  prevalSeqName=sequence.split(',')[0].split('+')
1772  valSeqName=sequence.split(',')[1].split('+')
1773  self.expandMapping(prevalSeqName,autoValidation,index=0)
1774  self.expandMapping(valSeqName,autoValidation,index=1)
1775  else:
1776  if '@' in sequence:
1777  prevalSeqName=sequence.split('+')
1778  valSeqName=sequence.split('+')
1779  self.expandMapping(prevalSeqName,autoValidation,index=0)
1780  self.expandMapping(valSeqName,autoValidation,index=1)
1781  else:
1782  postfix=''
1783  if sequence:
1784  postfix='_'+sequence
1785  prevalSeqName=['prevalidation'+postfix]
1786  valSeqName=['validation'+postfix]
1787  if not hasattr(self.process,valSeqName[0]):
1788  prevalSeqName=['']
1789  valSeqName=[sequence]
1790 
1791  def NFI(index):
1792  ##name from index, required to keep backward compatibility
1793  if index==0:
1794  return ''
1795  else:
1796  return '%s'%index
1797 
1798  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1799  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1800  self._options.restoreRNDSeeds=True
1801 
1802  #rename the HLT process in validation steps
1803  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1804  for s in valSeqName+prevalSeqName:
1805  if s:
1806  self.renameHLTprocessInSequence(s)
1807  for (i,s) in enumerate(prevalSeqName):
1808  if s:
1809  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1810  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1811 
1812  for (i,s) in enumerate(valSeqName):
1813  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1814  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1815 
1816  if not 'DIGI' in self.stepMap and not self._options.fast:
1817  self.executeAndRemember("process.mix.playback = True")
1818  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1819  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1820  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1821 
1822  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1823  #will get in the schedule, smoothly
1824  for (i,s) in enumerate(valSeqName):
1825  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1826 
1827  return
1828 
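# Illustrative sketch: NFI() above keeps the historical path names, i.e. the first
# validation path is 'validation_step' and later ones get a numeric suffix.
#
#     def NFI(index):
#         return '' if index == 0 else '%s' % index
#     names = ['validation_step%s' % NFI(i) for i in range(3)]
#     # names == ['validation_step', 'validation_step1', 'validation_step2']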
1829 
1831  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1832  It will climb down within PSets, VPSets and VInputTags to find its target"""
1833  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1834  self._paramReplace = paramReplace
1835  self._paramSearch = paramSearch
1836  self._verbose = verbose
1837  self._whitelist = whitelist
1838 
1839  def doIt(self,pset,base):
1840  if isinstance(pset, cms._Parameterizable):
1841  for name in pset.parameters_().keys():
1842  # skip whitelisted parameters
1843  if name in self._whitelist:
1844  continue
1845  # if I use pset.parameters_().items() I get copies of the parameter values
1846  # so I can't modify the nested pset
1847  value = getattr(pset,name)
1848  type = value.pythonTypeName()
1849  if type in ('cms.PSet', 'cms.untracked.PSet'):
1850  self.doIt(value,base+"."+name)
1851  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1852  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1853  elif type in ('cms.string', 'cms.untracked.string'):
1854  if value.value() == self._paramSearch:
1855  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1856  setattr(pset, name,self._paramReplace)
1857  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1858  for (i,n) in enumerate(value):
1859  if not isinstance(n, cms.InputTag):
1860  n=cms.InputTag(n)
1861  if n.processName == self._paramSearch:
1862  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1863  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1864  setattr(n,"processName",self._paramReplace)
1865  value[i]=n
1866  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1867  for (i,n) in enumerate(value):
1868  if n==self._paramSearch:
1869  getattr(pset,name)[i]=self._paramReplace
1870  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1871  if value.processName == self._paramSearch:
1872  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1873  setattr(getattr(pset, name),"processName",self._paramReplace)
1874 
1875  def enter(self,visitee):
1876  label = ''
1877  try:
1878  label = visitee.label()
1879  except AttributeError:
1880  label = '<Module not in a Process>'
1881  except:
1882  label = 'other exception'
1883  self.doIt(visitee, label)
1884 
1885  def leave(self,visitee):
1886  pass
1887 
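# Illustrative sketch (hypothetical sequence name): the visitor above is applied to a
# sequence via .visit(); this is the same pattern that renameHLTprocessInSequence()
# emits into the generated configuration further down.
#
#     from Configuration.Applications.ConfigBuilder import ConfigBuilder
#     getattr(process, 'demoSequence').visit(
#         ConfigBuilder.MassSearchReplaceProcessNameVisitor(
#             "HLT", "reHLT", whitelist = ("subSystemFolder",)))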
1888  #visit a sequence to replace all input tags
1889  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1890  print "Replacing all InputTag %s => %s"%(oldT,newT)
1891  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1892  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1893  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1894  if not loadMe in self.additionalCommands:
1895  self.additionalCommands.append(loadMe)
1896  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1897 
1898  #change the process name used to address HLT results in any sequence
1899  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1900  if self._options.hltProcess:
1901  proc=self._options.hltProcess
1902  else:
1903  proc=self.process.name_()
1904  if proc==HLTprocess: return
1905  # look up all modules in the dqm sequence
1906  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1907  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1908  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1909  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1910  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1911 
1912 
1913  def expandMapping(self,seqList,mapping,index=None):
1914  maxLevel=20
1915  level=0
1916  while '@' in repr(seqList) and level<maxLevel:
1917  level+=1
1918  for specifiedCommand in seqList:
1919  if specifiedCommand.startswith('@'):
1920  location=specifiedCommand[1:]
1921  if not location in mapping:
1922  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1923  mappedTo=mapping[location]
1924  if index!=None:
1925  mappedTo=mappedTo[index]
1926  seqList.remove(specifiedCommand)
1927  seqList.extend(mappedTo.split('+'))
1928  break
1929  if level==maxLevel:
1930  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1931 
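# Illustrative sketch (hypothetical mapping): expandMapping() above rewrites '@key'
# entries in place, and an expansion may itself contain further '@' entries; a toy,
# runnable equivalent without the maxLevel guard:
#
#     mapping = {'common': 'dqmA+dqmB', 'all': '@common+dqmC'}
#     seqList = ['@all']
#     while '@' in repr(seqList):
#         for item in seqList:
#             if item.startswith('@'):
#                 seqList.remove(item)
#                 seqList.extend(mapping[item[1:]].split('+'))
#                 break
#     # seqList == ['dqmC', 'dqmA', 'dqmB']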
1932  def prepare_DQM(self, sequence = 'DQMOffline'):
1933  # this one needs replacement
1934 
1935  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1936  sequenceList=sequence.split('.')[-1].split('+')
1937  from DQMOffline.Configuration.autoDQM import autoDQM
1938  self.expandMapping(sequenceList,autoDQM,index=0)
1939 
1940  if len(set(sequenceList))!=len(sequenceList):
1941  sequenceList=list(set(sequenceList))
1942  print "Duplicate entries for DQM:, using",sequenceList
1943  pathName='dqmoffline_step'
1944 
1945  for (i,sequence) in enumerate(sequenceList):
1946  if (i!=0):
1947  pathName='dqmoffline_%d_step'%(i)
1948 
1949  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1950  self.renameHLTprocessInSequence(sequence)
1951 
1952  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1953  if 'HLT' in self.stepMap.keys():
1954  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1955  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1956  else:
1957  # schedule DQM as a standard Path
1958  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1959  self.schedule.append(getattr(self.process,pathName))
1960 
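# Illustrative sketch (hypothetical module/sequence names): as coded above, a DQM
# sequence is scheduled in a cms.EndPath when HLT runs in the same process (so the
# local TriggerResults are available) and in a plain cms.Path otherwise.
#
#     import FWCore.ParameterSet.Config as cms
#     process = cms.Process("SKETCH")
#     process.demoAnalyzer = cms.EDAnalyzer("HypotheticalDQMAnalyzer")
#     process.demoDQM = cms.Sequence(process.demoAnalyzer)
#     hltRunsHere = False                    # hypothetical condition
#     makePath = cms.EndPath if hltRunsHere else cms.Path
#     process.dqmoffline_step = makePath(process.demoDQM)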
1961 
1962  def prepare_HARVESTING(self, sequence = None):
1963  """ Enrich the process with harvesting step """
1964  self.EDMtoMECFF='Configuration/StandardSequences/EDMtoME'+self._options.harvesting+'_cff'
1965  self.loadAndRemember(self.EDMtoMECFF)
1966  self.scheduleSequence('EDMtoME','edmtome_step')
1967 
1968  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1969  sequence = sequence.split('.')[-1]
1970 
1971  # decide which HARVESTING paths to use
1972  harvestingList = sequence.split("+")
1973  from DQMOffline.Configuration.autoDQM import autoDQM
1974  from Validation.Configuration.autoValidation import autoValidation
1975  import copy
1976  combined_mapping = copy.deepcopy( autoDQM )
1977  combined_mapping.update( autoValidation )
1978  self.expandMapping(harvestingList,combined_mapping,index=-1)
1979 
1980  if len(set(harvestingList))!=len(harvestingList):
1981  harvestingList=list(set(harvestingList))
1982  print "Duplicate entries for HARVESTING, using",harvestingList
1983 
1984  for name in harvestingList:
1985  if not name in harvestingConfig.__dict__:
1986  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1987  continue
1988  harvestingstream = getattr(harvestingConfig,name)
1989  if isinstance(harvestingstream,cms.Path):
1990  self.schedule.append(harvestingstream)
1991  self.blacklist_paths.append(harvestingstream)
1992  if isinstance(harvestingstream,cms.Sequence):
1993  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1994  self.schedule.append(getattr(self.process,name+"_step"))
1995 
1996  self.scheduleSequence('DQMSaver','dqmsave_step')
1997  return
1998 
1999  def prepare_ALCAHARVEST(self, sequence = None):
2000  """ Enrich the process with AlCaHarvesting step """
2001  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2002  sequence=sequence.split(".")[-1]
2003 
2004  # decide which AlcaHARVESTING paths to use
2005  harvestingList = sequence.split("+")
2006 
2007 
2008 
2009  from Configuration.AlCa.autoPCL import autoPCL
2010  self.expandMapping(harvestingList,autoPCL)
2011 
2012  for name in harvestingConfig.__dict__:
2013  harvestingstream = getattr(harvestingConfig,name)
2014  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2015  self.schedule.append(harvestingstream)
2016  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2017  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2018  harvestingList.remove(name)
2019  # append the common part at the end of the sequence
2020  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2021  self.schedule.append(lastStep)
2022 
2023  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2024  print "The following harvesting could not be found : ", harvestingList
2025  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2026 
2027 
2028 
2029  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2030  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2031  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2032  return
2033 
2034  def finalizeFastSimHLT(self):
2035  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2036  self.schedule.append(self.process.reconstruction)
2037 
2038 
2039  def build_production_info(self, evt_type, evtnumber):
2040  """ Add useful info for the production. """
2041  self.process.configurationMetadata=cms.untracked.PSet\
2042  (version=cms.untracked.string("$Revision: 1.19 $"),
2043  name=cms.untracked.string("Applications"),
2044  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2045  )
2046 
2047  self.addedObjects.append(("Production Info","configurationMetadata"))
2048 
2049 
2050  def prepare(self, doChecking = False):
2051  """ Prepare the configuration string and add missing pieces."""
2052 
2053  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2054  self.addMaxEvents()
2055  if self.with_input:
2056  self.addSource()
2057  self.addStandardSequences()
2058  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2059  self.completeInputCommand()
2060  self.addConditions()
2061 
2062 
2063  outputModuleCfgCode=""
2064  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2065  outputModuleCfgCode=self.addOutput()
2066 
2067  self.addCommon()
2068 
2069  self.pythonCfgCode = "# Auto generated configuration file\n"
2070  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2071  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2072  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2073  if hasattr(self._options,"era") and self._options.era :
2074  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2075  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2076  # Multiple eras can be specified in a comma separated list
2077  for requestedEra in self._options.era.split(",") :
2078  self.pythonCfgCode += ",eras."+requestedEra
2079  self.pythonCfgCode += ")\n\n" # end of the line
2080  else :
2081  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2082 
2083  self.pythonCfgCode += "# import of standard configurations\n"
2084  for module in self.imports:
2085  self.pythonCfgCode += ("process.load('"+module+"')\n")
2086 
2087  # production info
2088  if not hasattr(self.process,"configurationMetadata"):
2089  self.build_production_info(self._options.evt_type, self._options.number)
2090  else:
2091  #the PSet was added via a load
2092  self.addedObjects.append(("Production Info","configurationMetadata"))
2093 
2094  self.pythonCfgCode +="\n"
2095  for comment,object in self.addedObjects:
2096  if comment!="":
2097  self.pythonCfgCode += "\n# "+comment+"\n"
2098  self.pythonCfgCode += dumpPython(self.process,object)
2099 
2100  # dump the output definition
2101  self.pythonCfgCode += "\n# Output definition\n"
2102  self.pythonCfgCode += outputModuleCfgCode
2103 
2104  # dump all additional outputs (e.g. alca or skim streams)
2105  self.pythonCfgCode += "\n# Additional output definition\n"
2106  #I do not understand why the keys are not normally ordered.
2107  nl=self.additionalOutputs.keys()
2108  nl.sort()
2109  for name in nl:
2110  output = self.additionalOutputs[name]
2111  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2112  tmpOut = cms.EndPath(output)
2113  setattr(self.process,name+'OutPath',tmpOut)
2114  self.schedule.append(tmpOut)
2115 
2116  # dump all additional commands
2117  self.pythonCfgCode += "\n# Other statements\n"
2118  for command in self.additionalCommands:
2119  self.pythonCfgCode += command + "\n"
2120 
2121  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2122  for object in self._options.inlineObjets.split(','):
2123  if not object:
2124  continue
2125  if not hasattr(self.process,object):
2126  print 'cannot inline -'+object+'- : not known'
2127  else:
2128  self.pythonCfgCode +='\n'
2129  self.pythonCfgCode +=dumpPython(self.process,object)
2130 
2131  # dump all paths
2132  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2133  for path in self.process.paths:
2134  if getattr(self.process,path) not in self.blacklist_paths:
2135  self.pythonCfgCode += dumpPython(self.process,path)
2136 
2137  for endpath in self.process.endpaths:
2138  if getattr(self.process,endpath) not in self.blacklist_paths:
2139  self.pythonCfgCode += dumpPython(self.process,endpath)
2140 
2141  # dump the schedule
2142  self.pythonCfgCode += "\n# Schedule definition\n"
2143  result = "process.schedule = cms.Schedule("
2144 
2145  # handling of the schedule
2146  self.process.schedule = cms.Schedule()
2147  for item in self.schedule:
2148  if not isinstance(item, cms.Schedule):
2149  self.process.schedule.append(item)
2150  else:
2151  self.process.schedule.extend(item)
2152 
2153  if hasattr(self.process,"HLTSchedule"):
2154  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2155  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2156  pathNames = ['process.'+p.label_() for p in beforeHLT]
2157  result += ','.join(pathNames)+')\n'
2158  result += 'process.schedule.extend(process.HLTSchedule)\n'
2159  pathNames = ['process.'+p.label_() for p in afterHLT]
2160  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2161  else:
2162  pathNames = ['process.'+p.label_() for p in self.schedule]
2163  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2164 
2165  self.pythonCfgCode += result
2166 
2167  if self._options.nThreads != "1":
2168  self.pythonCfgCode +="\n"
2169  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2170  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2171  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2172  #repacked version
2173  if self._options.isRepacked:
2174  self.pythonCfgCode +="\n"
2175  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2176  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2177  MassReplaceInputTag(self.process)
2178 
2179  # special treatment in case of production filter sequence 2/2
2180  if self.productionFilterSequence:
2181  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2182  self.pythonCfgCode +='for path in process.paths:\n'
2183  if len(self.conditionalPaths):
2184  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2185  if len(self.excludedPaths):
2186  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2187  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2188  pfs = getattr(self.process,self.productionFilterSequence)
2189  for path in self.process.paths:
2190  if not path in self.conditionalPaths: continue
2191  if path in self.excludedPaths: continue
2192  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2193 
2194 
2195  # dump customise fragment
2196  self.pythonCfgCode += self.addCustomise()
2197 
2198  if self._options.runUnscheduled:
2199  # prune and delete paths
2200  #this is not supporting the blacklist at this point since I do not understand it
2201  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2202  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2203  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2204 
2205  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2206  self.process=convertToUnscheduled(self.process)
2207 
2208  #now add the unscheduled stuff
2209  for module in self.importsUnsch:
2210  self.process.load(module)
2211  self.pythonCfgCode += ("process.load('"+module+"')\n")
2212 
2213  #and clean the unscheduled stuff
2214  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2215  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2216 
2217  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2218  self.process=cleanUnscheduled(self.process)
2219 
2220 
2221  self.pythonCfgCode += self.addCustomise(1)
2222 
2223 
2224  # make the .io file
2225 
2226  if self._options.io:
2227  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2228  if not self._options.io.endswith('.io'): self._options.io+='.io'
2229  io=open(self._options.io,'w')
2230  ioJson={}
2231  if hasattr(self.process.source,"fileNames"):
2232  if len(self.process.source.fileNames.value()):
2233  ioJson['primary']=self.process.source.fileNames.value()
2234  if hasattr(self.process.source,"secondaryFileNames"):
2235  if len(self.process.source.secondaryFileNames.value()):
2236  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2237  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2238  ioJson['pileup']=self._options.pileup_input[4:]
2239  for (o,om) in self.process.outputModules_().items():
2240  ioJson[o]=om.fileName.value()
2241  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2242  if self.productionFilterSequence:
2243  ioJson['filter']=self.productionFilterSequence
2244  import json
2245  io.write(json.dumps(ioJson))
2246  return
2247 