ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 import FWCore.ParameterSet.DictTypes as DictTypes
12 class Options:
13  pass
14 
15 # the canonical defaults
16 defaultOptions = Options()
17 defaultOptions.datamix = 'DataOnSim'
18 defaultOptions.isMC=False
19 defaultOptions.isData=True
20 defaultOptions.step=''
21 defaultOptions.pileup='NoPileUp'
22 defaultOptions.pileup_input = None
23 defaultOptions.geometry = 'SimDB'
24 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
25 defaultOptions.magField = ''
26 defaultOptions.conditions = None
27 defaultOptions.useCondDBv1 = False
28 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
29 defaultOptions.harvesting= 'AtRunEnd'
30 defaultOptions.gflash = False
31 defaultOptions.number = -1
32 defaultOptions.number_out = None
33 defaultOptions.arguments = ""
34 defaultOptions.name = "NO NAME GIVEN"
35 defaultOptions.evt_type = ""
36 defaultOptions.filein = ""
37 defaultOptions.dasquery=""
38 defaultOptions.secondfilein = ""
39 defaultOptions.customisation_file = []
40 defaultOptions.customisation_file_unsch = []
41 defaultOptions.customise_commands = ""
42 defaultOptions.inline_custom=False
43 defaultOptions.particleTable = 'pythiapdt'
44 defaultOptions.particleTableList = ['pythiapdt','pdt']
45 defaultOptions.dirin = ''
46 defaultOptions.dirout = ''
47 defaultOptions.filetype = 'EDM'
48 defaultOptions.fileout = 'output.root'
49 defaultOptions.filtername = ''
50 defaultOptions.lazy_download = False
51 defaultOptions.custom_conditions = ''
52 defaultOptions.hltProcess = ''
53 defaultOptions.eventcontent = None
54 defaultOptions.datatier = None
55 defaultOptions.inlineEventContent = True
56 defaultOptions.inlineObjets =''
57 defaultOptions.hideGen=False
58 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
59 defaultOptions.beamspot=None
60 defaultOptions.outputDefinition =''
61 defaultOptions.inputCommands = None
62 defaultOptions.outputCommands = None
63 defaultOptions.inputEventContent = ''
64 defaultOptions.dropDescendant = False
65 defaultOptions.relval = None
66 defaultOptions.slhc = None
67 defaultOptions.profile = None
68 defaultOptions.isRepacked = False
69 defaultOptions.restoreRNDSeeds = False
70 defaultOptions.donotDropOnInput = ''
71 defaultOptions.python_filename =''
72 defaultOptions.io=None
73 defaultOptions.lumiToProcess=None
74 defaultOptions.fast=False
75 defaultOptions.runsAndWeightsForMC = None
76 defaultOptions.runsScenarioForMC = None
77 defaultOptions.runUnscheduled = False
78 defaultOptions.timeoutOutput = False
79 defaultOptions.nThreads = '1'
80 
81 # some helper routines
82 def dumpPython(process,name):
83  theObject = getattr(process,name)
84  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
85  return "process."+name+" = " + theObject.dumpPython("process")
86  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
87  return "process."+name+" = " + theObject.dumpPython()+"\n"
88  else:
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90 def filesFromList(fileName,s=None):
91  import os
92  import FWCore.ParameterSet.Config as cms
93  prim=[]
94  sec=[]
95  for line in open(fileName,'r'):
96  if line.count(".root")>=2:
97  #two files solution...
98  entries=line.replace("\n","").split()
99  if not entries[0] in prim:
100  prim.append(entries[0])
101  if not entries[1] in sec:
102  sec.append(entries[1])
103  elif (line.find(".root")!=-1):
104  entry=line.replace("\n","")
105  if not entry in prim:
106  prim.append(entry)
107  if s:
108  if not hasattr(s,"fileNames"):
109  s.fileNames=cms.untracked.vstring(prim)
110  else:
111  s.fileNames.extend(prim)
112  if len(sec)!=0:
113  if not hasattr(s,"secondaryFileNames"):
114  s.secondaryFileNames=cms.untracked.vstring(sec)
115  else:
116  s.secondaryFileNames.extend(sec)
117  print "found files: ",prim
118  if len(prim)==0:
119  raise Exception("There are no files in input from the file list")
120  if len(sec)!=0:
121  print "found parent files:",sec
122  return (prim,sec)
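# Illustrative note, not part of the original source: filesFromList expects a plain text
# file with one primary file per line, optionally followed by its parent file (paths below
# are hypothetical):
#   /store/data/Run2012A/MinimumBias/RECO/file1.root /store/data/Run2012A/MinimumBias/RAW/parent1.root
#   /store/data/Run2012A/MinimumBias/RECO/file2.root
# A line with two .root entries fills both fileNames and secondaryFileNames of the source;
# a line with a single .root entry only fills fileNames.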
123 
124 def filesFromDASQuery(query,s=None):
125  import os
126  import FWCore.ParameterSet.Config as cms
127  prim=[]
128  sec=[]
129  print "the query is",query
130  for line in os.popen('das_client.py --query "%s"'%(query)):
131  if line.count(".root")>=2:
132  #two files solution...
133  entries=line.replace("\n","").split()
134  if not entries[0] in prim:
135  prim.append(entries[0])
136  if not entries[1] in sec:
137  sec.append(entries[1])
138  elif (line.find(".root")!=-1):
139  entry=line.replace("\n","")
140  if not entry in prim:
141  prim.append(entry)
142  if s:
143  if not hasattr(s,"fileNames"):
144  s.fileNames=cms.untracked.vstring(prim)
145  else:
146  s.fileNames.extend(prim)
147  if len(sec)!=0:
148  if not hasattr(s,"secondaryFileNames"):
149  s.secondaryFileNames=cms.untracked.vstring(sec)
150  else:
151  s.secondaryFileNames.extend(sec)
152  print "found files: ",prim
153  if len(sec)!=0:
154  print "found parent files:",sec
155  return (prim,sec)
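# Illustrative usage sketch, not part of the original source (the dataset name is hypothetical):
#   filesFromDASQuery('file dataset = /RelValTTbar/CMSSW_7_1_0-START71_V1-v1/GEN-SIM-RECO', process.source)
# runs das_client.py with that query and appends every .root path found in its output to
# process.source.fileNames (and to secondaryFileNames when a line carries two files).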
156 
157 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
158  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
159  for s in aProcess.paths_().keys():
160  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
161 
162 def anyOf(listOfKeys,dict,opt=None):
163  for k in listOfKeys:
164  if k in dict:
165  toReturn=dict[k]
166  dict.pop(k)
167  return toReturn
168  if opt!=None:
169  return opt
170  else:
171  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
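# Illustrative behaviour, not part of the original source: anyOf returns the value of the
# first matching key and removes it from the dict,
#   anyOf(['t','tier','dataTier'], {'tier':'AOD','e':'AODSIM'})  ->  'AOD'   ('tier' is popped)
#   anyOf(['f','ftN','filterName'], {}, '')                      ->  ''      (the supplied default)
# With no match and no default it raises, which is how mandatory --output keys are enforced.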
172 
174  """The main building routines """
175 
176  def __init__(self, options, process = None, with_output = False, with_input = False ):
177  """options taken from old cmsDriver and optparse """
178 
179  options.outfile_name = options.dirout+options.fileout
180 
181  self._options = options
182 
183  if self._options.isData and options.isMC:
184  raise Exception("ERROR: You may specify only --data or --mc, not both")
185  #if not self._options.conditions:
186  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
187 
188  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
189  if 'ENDJOB' in self._options.step:
190  if (hasattr(self._options,"outputDefinition") and \
191  self._options.outputDefinition != '' and \
192  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
193  (hasattr(self._options,"datatier") and \
194  self._options.datatier and \
195  'DQMIO' in self._options.datatier):
196  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
197  self._options.step=self._options.step.replace(',ENDJOB','')
198 
199 
200 
201  # what steps are provided by this class?
202  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
203  self.stepMap={}
204  self.stepKeys=[]
205  for step in self._options.step.split(","):
206  if step=='': continue
207  stepParts = step.split(":")
208  stepName = stepParts[0]
209  if stepName not in stepList and not stepName.startswith('re'):
210  raise ValueError("Step "+stepName+" unknown")
211  if len(stepParts)==1:
212  self.stepMap[stepName]=""
213  elif len(stepParts)==2:
214  self.stepMap[stepName]=stepParts[1].split('+')
215  elif len(stepParts)==3:
216  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
217  else:
218  raise ValueError("Step definition "+step+" invalid")
219  self.stepKeys.append(stepName)
220 
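# Illustrative note on the --step syntax parsed above (step and sequence names are examples):
#   --step NAME          -> self.stepMap['NAME'] = ''
#   --step NAME:a+b      -> self.stepMap['NAME'] = ['a','b']
#   --step NAME:x:a+b    -> self.stepMap['NAME'] = (['a','b'], 'x')
# i.e. up to two ':'-separated fields after the step name, with '+' separating sequences.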
221  #print "map of steps is:",self.stepMap
222 
223  self.with_output = with_output
224  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
225  self.with_output = False
226  self.with_input = with_input
227  if process == None:
228  self.process = cms.Process(self._options.name)
229  else:
230  self.process = process
231  self.imports = []
232  self.importsUnsch = []
233  self.define_Configs()
234  self.schedule = list()
235 
236  # we are doing three things here:
237  # creating a process to catch errors
238  # building the code to re-create the process
239 
240  self.additionalCommands = []
241  # TODO: maybe a list of to be dumped objects would help as well
242  self.blacklist_paths = []
243  self.addedObjects = []
244  self.additionalOutputs = {}
245 
246  self.productionFilterSequence = None
247  self.nextScheduleIsConditional=False
248  self.conditionalPaths=[]
249  self.excludedPaths=[]
250 
251  def profileOptions(self):
252  """
253  addIgProfService
254  Function to add the igprof profile service so that you can dump in the middle
255  of the run.
256  """
257  profileOpts = self._options.profile.split(':')
258  profilerStart = 1
259  profilerInterval = 100
260  profilerFormat = None
261  profilerJobFormat = None
262 
263  if len(profileOpts):
264  #type, given as first argument is unused here
265  profileOpts.pop(0)
266  if len(profileOpts):
267  startEvent = profileOpts.pop(0)
268  if not startEvent.isdigit():
269  raise Exception("%s is not a number" % startEvent)
270  profilerStart = int(startEvent)
271  if len(profileOpts):
272  eventInterval = profileOpts.pop(0)
273  if not eventInterval.isdigit():
274  raise Exception("%s is not a number" % eventInterval)
275  profilerInterval = int(eventInterval)
276  if len(profileOpts):
277  profilerFormat = profileOpts.pop(0)
278 
279 
280  if not profilerFormat:
281  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
282  self._options.step,
283  self._options.pileup,
284  self._options.conditions,
285  self._options.datatier,
286  self._options.profileTypeLabel)
287  if not profilerJobFormat and profilerFormat.endswith(".gz"):
288  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
289  elif not profilerJobFormat:
290  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
291 
292  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
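# Illustrative parsing example, not part of the original source (the option value is hypothetical):
#   --profile pp:10:500:myprof.gz  ->  profilerStart=10, profilerInterval=500,
#                                      profilerFormat='myprof.gz', profilerJobFormat='myprof_EndOfJob.gz'
# The first ':'-separated field (the profiler type) is consumed but not used here.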
293 
294  def load(self,includeFile):
295  includeFile = includeFile.replace('/','.')
296  self.process.load(includeFile)
297  return sys.modules[includeFile]
298 
299  def loadAndRemember(self, includeFile,unsch=0):
300  """helper routine to load am memorize imports"""
301  # we could make the imports a on-the-fly data method of the process instance itself
302  # not sure if the latter is a good idea
303  includeFile = includeFile.replace('/','.')
304  if unsch==0:
305  self.imports.append(includeFile)
306  self.process.load(includeFile)
307  return sys.modules[includeFile]
308  else:
309  self.importsUnsch.append(includeFile)
310  return 0#sys.modules[includeFile]
311 
312  def executeAndRemember(self, command):
313  """helper routine to remember replace statements"""
314  self.additionalCommands.append(command)
315  if not command.strip().startswith("#"):
316  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
317  import re
318  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
319  #exec(command.replace("process.","self.process."))
320 
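# Illustrative note, not part of the original source: executeAndRemember both applies a
# configuration command to the in-memory process and records it so the dumped configuration
# re-emits it, e.g. (hypothetical attribute path):
#   self.executeAndRemember('process.mix.input.nbPileupEvents.averageNumber = cms.double(20)')
# The regex substitution above rewrites 'process.' into 'self.process.' before exec'ing it.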
321  def addCommon(self):
322  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
323  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
324  else:
325  self.process.options = cms.untracked.PSet( )
326 
327  if self._options.runUnscheduled:
328  self.process.options.allowUnscheduled=cms.untracked.bool(True)
329 
330  self.addedObjects.append(("","options"))
331 
332  if self._options.lazy_download:
333  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
334  stats = cms.untracked.bool(True),
335  enable = cms.untracked.bool(True),
336  cacheHint = cms.untracked.string("lazy-download"),
337  readHint = cms.untracked.string("read-ahead-buffered")
338  )
339  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
340 
341  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
342  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
343 
344  if self._options.profile:
345  (start, interval, eventFormat, jobFormat)=self.profileOptions()
346  self.process.IgProfService = cms.Service("IgProfService",
347  reportFirstEvent = cms.untracked.int32(start),
348  reportEventInterval = cms.untracked.int32(interval),
349  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
350  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
351  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
352 
353  def addMaxEvents(self):
354  """Here we decide how many evts will be processed"""
355  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
356  if self._options.number_out:
357  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
358  self.addedObjects.append(("","maxEvents"))
359 
360  def addSource(self):
361  """Here the source is built. Priority: file, generator"""
362  self.addedObjects.append(("Input source","source"))
363 
364  def filesFromOption(self):
365  for entry in self._options.filein.split(','):
366  print "entry",entry
367  if entry.startswith("filelist:"):
368  filesFromList(entry[9:],self.process.source)
369  elif entry.startswith("dbs:") or entry.startswith("das:"):
370  filesFromDASQuery('file dataset = %s'%(entry[4:]),self.process.source)
371  else:
372  self.process.source.fileNames.append(self._options.dirin+entry)
373  if self._options.secondfilein:
374  if not hasattr(self.process.source,"secondaryFileNames"):
375  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
376  for entry in self._options.secondfilein.split(','):
377  print "entry",entry
378  if entry.startswith("filelist:"):
379  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
380  elif entry.startswith("dbs:") or entry.startswith("das:"):
381  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:])))[0])
382  else:
383  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
384 
385  if self._options.filein or self._options.dasquery:
386  if self._options.filetype == "EDM":
387  self.process.source=cms.Source("PoolSource",
388  fileNames = cms.untracked.vstring(),
389  secondaryFileNames= cms.untracked.vstring())
390  filesFromOption(self)
391  elif self._options.filetype == "DAT":
392  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
393  filesFromOption(self)
394  elif self._options.filetype == "LHE":
395  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
396  if self._options.filein.startswith("lhe:"):
397  #list the article directory automatically
398  args=self._options.filein.split(':')
399  article=args[1]
400  print 'LHE input from article ',article
401  location='/store/lhe/'
402  import os
403  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
404  for line in textOfFiles:
405  for fileName in [x for x in line.split() if '.lhe' in x]:
406  self.process.source.fileNames.append(location+article+'/'+fileName)
407  if len(args)>2:
408  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
409  else:
410  filesFromOption(self)
411 
412 
413  elif self._options.filetype == "DQM":
414  self.process.source=cms.Source("DQMRootSource",
415  fileNames = cms.untracked.vstring())
416  filesFromOption(self)
417 
418  elif self._options.filetype == "DQMDAQ":
419  # FIXME: how to configure it if there are no input files specified?
420  self.process.source=cms.Source("DQMStreamerReader")
421 
422 
423  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
424  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
425 
426  if self._options.dasquery!='':
427  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
428  filesFromDASQuery(self._options.dasquery,self.process.source)
429 
430  ##drop LHEXMLStringProduct on input to save memory if appropriate
431  if 'GEN' in self.stepMap.keys():
432  if self._options.inputCommands:
433  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
434  else:
435  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
436 
437  if self.process.source and self._options.inputCommands:
438  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
439  for command in self._options.inputCommands.split(','):
440  # remove whitespace around the keep/drop statements
441  command = command.strip()
442  if command=='': continue
443  self.process.source.inputCommands.append(command)
444  if not self._options.dropDescendant:
445  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
446 
447  if self._options.lumiToProcess:
448  import FWCore.PythonUtilities.LumiList as LumiList
449  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
450 
451  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
452  if self.process.source is None:
453  self.process.source=cms.Source("EmptySource")
454 
455  # modify source in case of run-dependent MC
456  self.runsAndWeights=None
457  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
458  if not self._options.isMC :
459  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
460  if self._options.runsAndWeightsForMC:
461  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
462  else:
463  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
464  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
465  __import__(RunsAndWeights[self._options.runsScenarioForMC])
466  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
467  else:
468  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
469 
470  if self.runsAndWeights:
471  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
472  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
473  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
474  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
475 
476  return
477 
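# Illustrative --filein forms handled above (file and dataset names are hypothetical):
#   --filein file1.root,file2.root      plain EDM files (self._options.dirin is prepended)
#   --filein filelist:myFiles.txt       read the file list from a text file via filesFromList
#   --filein das:/A/B/RAW               resolve the dataset through a DAS query
#   --filein lhe:12345                  list /store/lhe/<article> via cmsLHEtoEOSManager.py (LHE filetype only)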
478  def addOutput(self):
479  """ Add output module to the process """
480  result=""
481  if self._options.outputDefinition:
482  if self._options.datatier:
483  print "--datatier & --eventcontent options ignored"
484 
485  #new output convention with a list of dict
486  outList = eval(self._options.outputDefinition)
487  for (id,outDefDict) in enumerate(outList):
488  outDefDictStr=outDefDict.__str__()
489  if not isinstance(outDefDict,dict):
490  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
491  #requires option: tier
492  theTier=anyOf(['t','tier','dataTier'],outDefDict)
493  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
494  ## event content
495  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
496  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
497  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
498  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
499  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
500  # module label has a particular role
501  if not theModuleLabel:
502  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
503  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
504  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
505  ]
506  for name in tryNames:
507  if not hasattr(self.process,name):
508  theModuleLabel=name
509  break
510  if not theModuleLabel:
511  raise Exception("cannot find a module label for specification: "+outDefDictStr)
512  if id==0:
513  defaultFileName=self._options.outfile_name
514  else:
515  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
516 
517  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
518  if not theFileName.endswith('.root'):
519  theFileName+='.root'
520 
521  if len(outDefDict.keys()):
522  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
523  if theStreamType=='DQMIO': theStreamType='DQM'
524  if theStreamType=='ALL':
525  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
526  else:
527  theEventContent = getattr(self.process, theStreamType+"EventContent")
528 
529 
530  addAlCaSelects=False
531  if theStreamType=='ALCARECO' and not theFilterName:
532  theFilterName='StreamALCACombined'
533  addAlCaSelects=True
534 
535  CppType='PoolOutputModule'
536  if self._options.timeoutOutput:
537  CppType='TimeoutPoolOutputModule'
538  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
539  output = cms.OutputModule(CppType,
540  theEventContent.clone(),
541  fileName = cms.untracked.string(theFileName),
542  dataset = cms.untracked.PSet(
543  dataTier = cms.untracked.string(theTier),
544  filterName = cms.untracked.string(theFilterName))
545  )
546  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
547  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
548  if not theSelectEvent and hasattr(self.process,'filtering_step'):
549  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
550  if theSelectEvent:
551  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
552 
553  if addAlCaSelects:
554  if not hasattr(output,'SelectEvents'):
555  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
556  for alca in self.AlCaPaths:
557  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
558 
559 
560  if hasattr(self.process,theModuleLabel):
561  raise Exception("the current process already has a module "+theModuleLabel+" defined")
562  #print "creating output module ",theModuleLabel
563  setattr(self.process,theModuleLabel,output)
564  outputModule=getattr(self.process,theModuleLabel)
565  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
566  path=getattr(self.process,theModuleLabel+'_step')
567  self.schedule.append(path)
568 
569  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
570  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
571  return label
572  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
573  if theExtraOutputCommands:
574  if not isinstance(theExtraOutputCommands,list):
575  raise Exception("extra output command in --output must be a list of strings")
576  if hasattr(self.process,theStreamType+"EventContent"):
577  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
578  else:
579  outputModule.outputCommands.extend(theExtraOutputCommands)
580 
581  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
582 
583  ##ends the --output options model
584  return result
585 
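# Illustrative --output specification, not part of the original source (values are hypothetical):
#   --output '[{"e":"RECO","t":"RECO","f":"myFilter"},{"e":"AODSIM","t":"AODSIM"}]'
# Each dict must provide a tier ('t'/'tier'/'dataTier'); event content, filter name,
# selectEvents, moduleLabel, fileName and outputCommands are optional keys consumed above.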
586  streamTypes=self._options.eventcontent.split(',')
587  tiers=self._options.datatier.split(',')
588  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
589  raise Exception("number of event content arguments does not match number of datatier arguments")
590 
591  # if the only step is alca we don't need to put in an output
592  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
593  return "\n"
594 
595  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
596  if streamType=='': continue
597  if streamType=='DQMIO': streamType='DQM'
598  theEventContent = getattr(self.process, streamType+"EventContent")
599  if i==0:
600  theFileName=self._options.outfile_name
601  theFilterName=self._options.filtername
602  else:
603  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
604  theFilterName=self._options.filtername
605  CppType='PoolOutputModule'
606  if self._options.timeoutOutput:
607  CppType='TimeoutPoolOutputModule'
608  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
609  output = cms.OutputModule(CppType,
610  theEventContent,
611  fileName = cms.untracked.string(theFileName),
612  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
613  filterName = cms.untracked.string(theFilterName)
614  )
615  )
616  if hasattr(self.process,"generation_step") and streamType!='LHE':
617  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
618  if hasattr(self.process,"filtering_step"):
619  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
620 
621  if streamType=='ALCARECO':
622  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
623 
624  if "MINIAOD" in streamType:
625  output.dropMetaData = cms.untracked.string('ALL')
626  output.fastCloning= cms.untracked.bool(False)
627  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
628 
629  outputModuleName=streamType+'output'
630  setattr(self.process,outputModuleName,output)
631  outputModule=getattr(self.process,outputModuleName)
632  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
633  path=getattr(self.process,outputModuleName+'_step')
634  self.schedule.append(path)
635 
636  if self._options.outputCommands and streamType!='DQM':
637  for evct in self._options.outputCommands.split(','):
638  if not evct: continue
639  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
640 
641  if not self._options.inlineEventContent:
642  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
643  return label
644  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
645 
646  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
647 
648  return result
649 
651  """
652  Add selected standard sequences to the process
653  """
654  # load the pile up file
655  if self._options.pileup:
656  pileupSpec=self._options.pileup.split(',')[0]
657 
658  # FastSim: GEN-mixing or DIGI-RECO mixing?
659  GEN_mixing = False
660  if self._options.fast and pileupSpec.find("GEN_") == 0:
661  GEN_mixing = True
662  pileupSpec = pileupSpec[4:]
663 
664  # Does the requested pile-up scenario exist?
665  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
666  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
667  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
668  if self._options.fast:
669  message += "\n-"*20+"\n additional options for FastSim (gen-mixing):\n" + "-"*20 + "\n" + '\n'.join(["GEN_" + x for x in Mixing.keys()]) + "\n"
670  raise Exception(message)
671 
672  # Put mixing parameters in a dictionary
673  if '.' in pileupSpec:
674  mixingDict={'file':pileupSpec}
675  elif pileupSpec.startswith('file:'):
676  mixingDict={'file':pileupSpec[5:]}
677  else:
678  import copy
679  mixingDict=copy.copy(Mixing[pileupSpec])
680  if len(self._options.pileup.split(','))>1:
681  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
682 
683  # Load the pu cfg file corresponding to the requested pu scenario
684  if 'file:' in pileupSpec:
685  #the file is local
686  self.process.load(mixingDict['file'])
687  print "inlining mixing module configuration"
688  self._options.inlineObjets+=',mix'
689  else:
690  self.loadAndRemember(mixingDict['file'])
691 
692  # FastSim: transform cfg of MixingModule from FullSim to FastSim
693  if self._options.fast:
694  if GEN_mixing:
695  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareGenMixing")
696  else:
697  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareDigiRecoMixing")
698 
699  mixingDict.pop('file')
700  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
701  if self._options.pileup_input:
702  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
703  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
704  else:
705  mixingDict['F']=self._options.pileup_input.split(',')
706  specialization=defineMixing(mixingDict)
707  for command in specialization:
708  self.executeAndRemember(command)
709  if len(mixingDict)!=0:
710  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
711 
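# Illustrative --pileup handling, not part of the original source (scenario name hypothetical):
# the first comma-separated token selects the scenario; everything after the first comma is
# eval'ed as a dict of overrides applied on top of it,
#   --pileup 'NoPileUp'                 -> mixingDict copied from Mixing['NoPileUp']
#   --pileup 'MyScenario,{"N": 20}'     -> same, then mixingDict['N'] overridden to 20
# while '.'-containing or 'file:'-prefixed specs load a local mixing configuration instead.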
712 
713  # load the geometry file
714  try:
715  if len(self.stepMap):
716  self.loadAndRemember(self.GeometryCFF)
717  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
718  self.loadAndRemember(self.SimGeometryCFF)
719  if self.geometryDBLabel:
720  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
721  except ImportError:
722  print "Geometry option",self._options.geometry,"unknown."
723  raise
724 
725  if len(self.stepMap):
726  self.loadAndRemember(self.magFieldCFF)
727 
728  for stepName in self.stepKeys:
729  stepSpec = self.stepMap[stepName]
730  print "Step:", stepName,"Spec:",stepSpec
731  if stepName.startswith('re'):
732  ##add the corresponding input content
733  if stepName[2:] not in self._options.donotDropOnInput:
734  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
735  stepName=stepName[2:]
736  if stepSpec=="":
737  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
738  elif type(stepSpec)==list:
739  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
740  elif type(stepSpec)==tuple:
741  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
742  else:
743  raise ValueError("Invalid step definition")
744 
745  if self._options.restoreRNDSeeds!=False:
746  #it is either True, or a process name
747  if self._options.restoreRNDSeeds==True:
748  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
749  else:
750  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
751  if self._options.inputEventContent or self._options.inputCommands:
752  if self._options.inputCommands:
753  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
754  else:
755  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
756 
757 
759  if self._options.inputEventContent:
760  import copy
761  def dropSecondDropStar(iec):
762  #drop all but the first occurrence of 'drop *' in the list
763  count=0
764  for item in iec:
765  if item=='drop *':
766  if count!=0:
767  iec.remove(item)
768  count+=1
769 
770 
771  ## allow comma separated input eventcontent
772  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
773  for evct in self._options.inputEventContent.split(','):
774  if evct=='': continue
775  theEventContent = getattr(self.process, evct+"EventContent")
776  if hasattr(theEventContent,'outputCommands'):
777  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
778  if hasattr(theEventContent,'inputCommands'):
779  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
780 
781  dropSecondDropStar(self.process.source.inputCommands)
782 
783  if not self._options.dropDescendant:
784  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
785 
786 
787  return
788 
789  def addConditions(self):
790  """Add conditions to the process"""
791  if not self._options.conditions: return
792 
793  if 'FrontierConditions_GlobalTag' in self._options.conditions:
794  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
795  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
796 
797  self.loadAndRemember(self.ConditionsDefaultCFF)
798 
799  if self._options.useCondDBv1:
800  from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag
801  else:
802  from Configuration.AlCa.GlobalTag import GlobalTag
803 
804  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
805 
806  if self._options.useCondDBv1:
807  self.additionalCommands.append('from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag')
808  else:
809  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
810 
811  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
812 
813  if self._options.slhc:
814  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
815 
816 
817  def addCustomise(self,unsch=0):
818  """Include the customise code """
819 
820  custOpt=[]
821  if unsch==0:
822  for c in self._options.customisation_file:
823  custOpt.extend(c.split(","))
824  else:
825  for c in self._options.customisation_file_unsch:
826  custOpt.extend(c.split(","))
827 
828  custMap=DictTypes.SortedKeysDict()
829  for opt in custOpt:
830  if opt=='': continue
831  if opt.count('.')>1:
832  raise Exception("more than one '.' in the specification: "+opt)
833  fileName=opt.split('.')[0]
834  if opt.count('.')==0: rest='customise'
835  else:
836  rest=opt.split('.')[1]
837  if rest=='py': rest='customise' #catch the case of --customise file.py
838 
839  if fileName in custMap:
840  custMap[fileName].extend(rest.split('+'))
841  else:
842  custMap[fileName]=rest.split('+')
843 
844  if len(custMap)==0:
845  final_snippet='\n'
846  else:
847  final_snippet='\n# customisation of the process.\n'
848 
849  allFcn=[]
850  for opt in custMap:
851  allFcn.extend(custMap[opt])
852  for fcn in allFcn:
853  if allFcn.count(fcn)!=1:
854  raise Exception("cannot specify "+fcn+" twice as a customisation method")
855 
856  for f in custMap:
857  # let python search for that package and do syntax checking at the same time
858  packageName = f.replace(".py","").replace("/",".")
859  __import__(packageName)
860  package = sys.modules[packageName]
861 
862  # now ask the package for its definition and pick .py instead of .pyc
863  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
864 
865  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
866  if self._options.inline_custom:
867  for line in file(customiseFile,'r'):
868  if "import FWCore.ParameterSet.Config" in line:
869  continue
870  final_snippet += line
871  else:
872  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
873  for fcn in custMap[f]:
874  print "customising the process with",fcn,"from",f
875  if not hasattr(package,fcn):
876  #bound to fail at run time
877  raise Exception("config "+f+" has no function "+fcn)
878  #execute the command
879  self.process=getattr(package,fcn)(self.process)
880  #and print it in the configuration
881  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
882  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
883 
884  if len(custMap)!=0:
885  final_snippet += '\n# End of customisation functions\n'
886 
887  ### now for a useful command
888  if unsch==1 or not self._options.runUnscheduled:
889  if self._options.customise_commands:
890  import string
891  final_snippet +='\n# Customisation from command line'
892  for com in self._options.customise_commands.split('\\n'):
893  com=string.lstrip(com)
894  self.executeAndRemember(com)
895  final_snippet +='\n'+com
896 
897  return final_snippet
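# Illustrative --customise syntax, not part of the original source (package and function
# names are hypothetical), as parsed above:
#   --customise SLHCUpgradeSimulations/Configuration/myCustoms.customisePhase1
#   --customise myCustoms.funcA+funcB      (several functions from one file)
#   --customise myCustoms.py               (bare file: the 'customise' function is assumed)
# Each function is imported, applied to the process, and re-emitted in the dumped configuration.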
898 
899  #----------------------------------------------------------------------------
900  # here the methods to define the python includes for each step or
901  # conditions
902  #----------------------------------------------------------------------------
903  def define_Configs(self):
904  if len(self.stepMap):
905  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
906  if self._options.particleTable not in defaultOptions.particleTableList:
907  print 'Invalid particle table provided. Options are:'
908  print defaultOptions.particleTableList
909  sys.exit(-1)
910  else:
911  if len(self.stepMap):
912  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
913 
914  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
915 
916  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
917  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
918  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
919  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
920  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
921  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
922  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
923  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
924  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
925  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
926  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
927  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
928  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
929  self.EIDefaultCFF=None
930  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
931  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
932  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
933  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
934  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
935  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
936  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
937  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
938  if self._options.useCondDBv1:
939  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_condDBv1_cff"
940  else:
941  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
942  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
943  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
944 
945  if "DATAMIX" in self.stepMap.keys():
946  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
947  if self._options.datamix == 'PreMix':
948  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
949  else:
950  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
951  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
952  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
953 
954  if "DIGIPREMIX" in self.stepMap.keys():
955  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
956 
957  self.ALCADefaultSeq=None
958  self.LHEDefaultSeq='externalLHEProducer'
959  self.GENDefaultSeq='pgen'
960  self.SIMDefaultSeq='psim'
961  self.DIGIDefaultSeq='pdigi'
962  self.DIGIPREMIXDefaultSeq='pdigi'
963  self.DIGIPREMIX_S2DefaultSeq='pdigi'
964  self.DATAMIXDefaultSeq=None
965  self.DIGI2RAWDefaultSeq='DigiToRaw'
966  self.HLTDefaultSeq='GRun'
967  self.L1DefaultSeq=None
968  self.L1REPACKDefaultSeq='GT'
969  self.HARVESTINGDefaultSeq=None
970  self.ALCAHARVESTDefaultSeq=None
971  self.CFWRITERDefaultSeq=None
972  self.RAW2DIGIDefaultSeq='RawToDigi'
973  self.L1RecoDefaultSeq='L1Reco'
974  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
975  if 'RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap:
976  self.RECODefaultSeq='reconstruction'
977  else:
978  self.RECODefaultSeq='reconstruction_fromRECO'
979 
980  self.EIDefaultSeq='top'
981  self.POSTRECODefaultSeq=None
982  self.L1HwValDefaultSeq='L1HwVal'
983  self.DQMDefaultSeq='DQMOffline'
984  self.VALIDATIONDefaultSeq=''
985  self.ENDJOBDefaultSeq='endOfProcess'
986  self.REPACKDefaultSeq='DigiToRawRepack'
987  self.PATDefaultSeq='miniAOD'
988 
989  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
990 
991  if not self._options.beamspot:
992  self._options.beamspot=VtxSmearedDefaultKey
993 
994  # if it's MC then change the raw2digi
995  if self._options.isMC==True:
996  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
997  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
998  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
999  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1000  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1001  else:
1002  self._options.beamspot = None
1003 
1004  #patch for gen, due to backward incompatibility
1005  if 'reGEN' in self.stepMap:
1006  self.GENDefaultSeq='fixGenInfo'
1007 
1008  if self._options.scenario=='cosmics':
1009  self._options.pileup='Cosmics'
1010  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1011  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1012  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1013  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1014  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1015  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1016  if self._options.isMC==True:
1017  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1018  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1019  self.RECODefaultSeq='reconstructionCosmics'
1020  self.DQMDefaultSeq='DQMOfflineCosmics'
1021 
1022  if self._options.scenario=='HeavyIons':
1023  if not self._options.beamspot:
1024  self._options.beamspot=VtxSmearedHIDefaultKey
1025  self.HLTDefaultSeq = 'HIon'
1026  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1027  self.VALIDATIONDefaultSeq=''
1028  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1029  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1030  self.RECODefaultSeq='reconstructionHeavyIons'
1031  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1032  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1033  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1034  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1035  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1036  if self._options.isMC==True:
1037  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1038 
1039 
1040  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1041 
1042  self.USERDefaultSeq='user'
1043  self.USERDefaultCFF=None
1044 
1045  # the magnetic field
1046  if self._options.isData:
1047  if self._options.magField==defaultOptions.magField:
1048  print "magnetic field option forced to: AutoFromDBCurrent"
1049  self._options.magField='AutoFromDBCurrent'
1050  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1051  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1052 
1053  # the geometry
1054  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1055  self.geometryDBLabel=None
1056  simGeometry=''
1057  if self._options.fast:
1058  if 'start' in self._options.conditions.lower():
1059  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1060  else:
1061  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1062  else:
1063  def inGeometryKeys(opt):
1064  from Configuration.StandardSequences.GeometryConf import GeometryConf
1065  if opt in GeometryConf:
1066  return GeometryConf[opt]
1067  else:
1068  return opt
1069 
1070  geoms=self._options.geometry.split(',')
1071  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1072  if len(geoms)==2:
1073  #may specify the reco geometry
1074  if '/' in geoms[1] or '_cff' in geoms[1]:
1075  self.GeometryCFF=geoms[1]
1076  else:
1077  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1078 
1079  if (geoms[0].startswith('DB:')):
1080  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1081  self.geometryDBLabel=geoms[0][3:]
1082  print "with DB:"
1083  else:
1084  if '/' in geoms[0] or '_cff' in geoms[0]:
1085  self.SimGeometryCFF=geoms[0]
1086  else:
1087  simGeometry=geoms[0]
1088  if self._options.gflash==True:
1089  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1090  else:
1091  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1092 
1093  # synchronize the geometry configuration and the FullSimulation sequence to be used
1094  if simGeometry not in defaultOptions.geometryExtendedOptions:
1095  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1096 
1097  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1098  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1099  self._options.beamspot='NoSmear'
1100 
1101  # if fastsim switch event content
1102  if self._options.fast:
1103  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1104  self.SIMDefaultSeq = 'psim'
1105  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1106  self.RECODefaultSeq= 'reconstruction'
1107  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1108  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1109  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1110  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1111  self.DIGIDefaultCFF = 'FastSimulation.Configuration.Digi_cff'
1112  if self._options.datamix == 'PreMix':
1113  self.DIGIDefaultCFF="FastSimulation.Configuration.DigiDMPreMix_cff"
1114  if "DIGIPREMIX" in self.stepMap.keys():
1115  self.DIGIDefaultCFF="FastSimulation.Configuration.Digi_PreMix_cff"
1116  if "DATAMIX" in self.stepMap.keys():
1117  self.DATAMIXDefaultCFF="FastSimulation.Configuration.DataMixer"+self._options.datamix+"_cff"
1118 
1119  self.DIGIDefaultSeq = 'pdigi'
1120  self.L1EMDefaultCFF='FastSimulation.Configuration.SimL1Emulator_cff'
1121  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1122  self.DIGI2RAWDefaultCFF = 'FastSimulation.Configuration.DigiToRaw_cff'
1123  self.DIGI2RAWDefaultSeq = 'DigiToRaw'
1124  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1125  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1126 
1127 
1128 
1129  # Mixing
1130  if self._options.pileup=='default':
1131  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1132  self._options.pileup=MixingDefaultKey
1133  # temporary, until digi-reco mixing becomes standard in RelVals
1134  if self._options.fast:
1135  self._options.pileup="GEN_" + MixingDefaultKey
1136 
1137 
1138  #not driven by a default cff anymore
1139  if self._options.isData:
1140  self._options.pileup=None
1141 
1142  if self._options.slhc:
1143  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1144  if 'stdgeom' not in self._options.slhc:
1145  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1146  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1147  if self._options.pileup!=defaultOptions.pileup:
1148  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1149 
1150  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1151 
1152  # for alca, skims, etc
1153  def addExtraStream(self,name,stream,workflow='full'):
1154  # define output module and go from there
1155  output = cms.OutputModule("PoolOutputModule")
1156  if stream.selectEvents.parameters_().__len__()!=0:
1157  output.SelectEvents = stream.selectEvents
1158  else:
1159  output.SelectEvents = cms.untracked.PSet()
1160  output.SelectEvents.SelectEvents=cms.vstring()
1161  if isinstance(stream.paths,tuple):
1162  for path in stream.paths:
1163  output.SelectEvents.SelectEvents.append(path.label())
1164  else:
1165  output.SelectEvents.SelectEvents.append(stream.paths.label())
1166 
1167 
1168 
1169  if isinstance(stream.content,str):
1170  evtPset=getattr(self.process,stream.content)
1171  for p in evtPset.parameters_():
1172  setattr(output,p,getattr(evtPset,p))
1173  if not self._options.inlineEventContent:
1174  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1175  return label
1176  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1177  else:
1178  output.outputCommands = stream.content
1179 
1180 
1181  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1182 
1183  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1184  filterName = cms.untracked.string(stream.name))
1185 
1186  if self._options.filtername:
1187  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1188 
1189  #add an automatic flushing to limit memory consumption
1190  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1191 
1192  if workflow in ("producers,full"):
1193  if isinstance(stream.paths,tuple):
1194  for path in stream.paths:
1195  self.schedule.append(path)
1196  else:
1197  self.schedule.append(stream.paths)
1198 
1199 
1200  # in case of relvals we don't want to have additional outputs
1201  if (not self._options.relval) and workflow in ("full","output"):
1202  self.additionalOutputs[name] = output
1203  setattr(self.process,name,output)
1204 
1205  if workflow == 'output':
1206  # adjust the select events to the proper trigger results from previous process
1207  filterList = output.SelectEvents.SelectEvents
1208  for i, filter in enumerate(filterList):
1209  filterList[i] = filter+":"+self._options.triggerResultsProcess
1210 
1211  return output
1212 
1213  #----------------------------------------------------------------------------
1214  # here the methods to create the steps. Of course we are doing magic here ;)
1215  # prepare_STEPNAME modifies self.process and whatever else is needed.
1216  #----------------------------------------------------------------------------
1217 
1218  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1219  if ( len(sequence.split('.'))==1 ):
1220  l=self.loadAndRemember(defaultCFF,unsch)
1221  elif ( len(sequence.split('.'))==2 ):
1222  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1223  sequence=sequence.split('.')[1]
1224  else:
1225  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1226  print sequence,"not recognized"
1227  raise
1228  return l
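# Illustrative note, not part of the original source (names hypothetical): loadDefaultOrSpecifiedCFF
# accepts either a bare sequence name, which keeps the step's default cff, or a spec such as
# 'MyPkg/MySubPkg/MyStep_cff.seqA+seqB', where the part before the '.' replaces the default cff.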
1229 
1230  def scheduleSequence(self,seq,prefix,what='Path'):
1231  if '*' in seq:
1232  #create only one path with all sequences in it
1233  for i,s in enumerate(seq.split('*')):
1234  if i==0:
1235  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1236  else:
1237  p=getattr(self.process,prefix)
1238  p+=getattr(self.process, s)
1239  self.schedule.append(getattr(self.process,prefix))
1240  return
1241  else:
1242  #create as many path as many sequences
1243  if not '+' in seq:
1244  if self.nextScheduleIsConditional:
1245  self.conditionalPaths.append(prefix)
1246  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1247  self.schedule.append(getattr(self.process,prefix))
1248  else:
1249  for i,s in enumerate(seq.split('+')):
1250  sn=prefix+'%d'%(i)
1251  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1252  self.schedule.append(getattr(self.process,sn))
1253  return
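# Illustrative behaviour of scheduleSequence (sequence names hypothetical):
#   seq='siPixelDigis+siStripDigis' -> two cms.Paths, named prefix+'0' and prefix+'1'
#   seq='siPixelDigis*siStripDigis' -> a single cms.Path named prefix containing both sequences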
1254 
1255  def scheduleSequenceAtEnd(self,seq,prefix):
1256  self.scheduleSequence(seq,prefix,what='EndPath')
1257  return
1258 
1259  def prepare_ALCAPRODUCER(self, sequence = None):
1260  self.prepare_ALCA(sequence, workflow = "producers")
1261 
1262  def prepare_ALCAOUTPUT(self, sequence = None):
1263  self.prepare_ALCA(sequence, workflow = "output")
1264 
1265  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1266  """ Enrich the process with alca streams """
1267  print 'DL enriching',workflow,sequence
1268  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1269  sequence = sequence.split('.')[-1]
1270 
1271  # decide which ALCA paths to use
1272  alcaList = sequence.split("+")
1273  maxLevel=0
1274  from Configuration.AlCa.autoAlca import autoAlca
1275  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1276  self.expandMapping(alcaList,autoAlca)
1277  self.AlCaPaths=[]
1278  for name in alcaConfig.__dict__:
1279  alcastream = getattr(alcaConfig,name)
1280  shortName = name.replace('ALCARECOStream','')
1281  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1282  output = self.addExtraStream(name,alcastream, workflow = workflow)
1283  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1284  self.AlCaPaths.append(shortName)
1285  if 'DQM' in alcaList:
1286  if not self._options.inlineEventContent and hasattr(self.process,name):
1287  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1288  else:
1289  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1290 
1291  #rename the HLT process name in the alca modules
1292  if self._options.hltProcess or 'HLT' in self.stepMap:
1293  if isinstance(alcastream.paths,tuple):
1294  for path in alcastream.paths:
1295  self.renameHLTprocessInSequence(path.label())
1296  else:
1297  self.renameHLTprocessInSequence(alcastream.paths.label())
1298 
1299  for i in range(alcaList.count(shortName)):
1300  alcaList.remove(shortName)
1301 
1302  # DQM needs a special handling
1303  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1304  path = getattr(alcaConfig,name)
1305  self.schedule.append(path)
1306  alcaList.remove('DQM')
1307 
1308  if isinstance(alcastream,cms.Path):
1309  #black list the alca path so that they do not appear in the cfg
1310  self.blacklist_paths.append(alcastream)
1311 
1312 
1313  if len(alcaList) != 0:
1314  available=[]
1315  for name in alcaConfig.__dict__:
1316  alcastream = getattr(alcaConfig,name)
1317  if isinstance(alcastream,cms.FilteredStream):
1318  available.append(name.replace('ALCARECOStream',''))
1319  print "The following alca streams could not be found "+str(alcaList)
1320  print "available:",available
1321  #print "verify your configuration, ignoring for now"
1322  raise Exception("The following alca streams could not be found "+str(alcaList))
1323 
1324  def prepare_LHE(self, sequence = None):
1325  #load the fragment
1326  ##make it loadable
1327  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1328  print "Loading lhe fragment from",loadFragment
1329  __import__(loadFragment)
1330  self.process.load(loadFragment)
1331  ##inline the modules
1332  self._options.inlineObjets+=','+sequence
1333 
1334  getattr(self.process,sequence).nEvents = int(self._options.number)
1335 
1336  #schedule it
1337  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1338  self.excludedPaths.append("lhe_step")
1339  self.schedule.append( self.process.lhe_step )
1340 
1341  def prepare_GEN(self, sequence = None):
1342  """ load the fragment of generator configuration """
1343  loadFailure=False
1344  #remove trailing .py
1345  #support old style .cfi by changing something.cfi into something_cfi
1346  #remove python/ from the name
1347  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1348  #standard location of fragments
1349  if not '/' in loadFragment:
1350  loadFragment='Configuration.Generator.'+loadFragment
1351  else:
1352  loadFragment=loadFragment.replace('/','.')
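# Worked examples of the name mangling above (editor's illustration, fragment names are
# hypothetical):
#   evt_type = 'SingleMuPt10_cfi'           -> loadFragment = 'Configuration.Generator.SingleMuPt10_cfi'
#   evt_type = 'MyPkg/python/MyGen_cff.py'  -> loadFragment = 'MyPkg.MyGen_cff'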
1353  try:
1354  print "Loading generator fragment from",loadFragment
1355  __import__(loadFragment)
1356  except:
1357  loadFailure=True
1358  #if self.process.source and self.process.source.type_()=='EmptySource':
1359  if not (self._options.filein or self._options.dasquery):
1360  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1361 
1362  if not loadFailure:
1363  generatorModule=sys.modules[loadFragment]
1364  genModules=generatorModule.__dict__
1365  #remove lhe producer module since this should have been
1366  #imported instead in the LHE step
1367  if self.LHEDefaultSeq in genModules:
1368  del genModules[self.LHEDefaultSeq]
1369 
1370  if self._options.hideGen:
1371  self.loadAndRemember(loadFragment)
1372  else:
1373  self.process.load(loadFragment)
1374  # expose the objects from that fragment to the configuration
1375  import FWCore.ParameterSet.Modules as cmstypes
1376  for name in genModules:
1377  theObject = getattr(generatorModule,name)
1378  if isinstance(theObject, cmstypes._Module):
1379  self._options.inlineObjets=name+','+self._options.inlineObjets
1380  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1381  self._options.inlineObjets+=','+name
1382 
1383  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1384  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1385  self.productionFilterSequence = 'ProductionFilterSequence'
1386  elif 'generator' in genModules:
1387  self.productionFilterSequence = 'generator'
1388 
1389  """ Enrich the schedule with the rest of the generation step """
1390  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1391  genSeqName=sequence.split('.')[-1]
1392 
1393  if True:
1394  try:
1395  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1396  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1397  self.loadAndRemember(cffToBeLoaded)
1398  except ImportError:
1399  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1400 
1401  if self._options.scenario == 'HeavyIons':
1402  if self._options.pileup=='HiMixGEN':
1403  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1404  else:
1405  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1406 
1407  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1408  self.schedule.append(self.process.generation_step)
1409 
1410  #register to the genstepfilter the name of the path (static right now, but might evolve)
1411  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1412 
1413  if 'reGEN' in self.stepMap:
1414  #stop here
1415  return
1416 
1417  """ Enrich the schedule with the summary of the filter step """
1418  #the gen filter in the endpath
1419  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1420  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1421  return
1422 
1423  def prepare_SIM(self, sequence = None):
1424  """ Enrich the schedule with the simulation step"""
1425  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1426  if not self._options.fast:
1427  if self._options.gflash==True:
1428  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1429 
1430  if self._options.magField=='0T':
1431  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1432  else:
1433  if self._options.magField=='0T':
1434  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1435 
1436  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1437  return
1438 
1439  def prepare_DIGI(self, sequence = None):
1440  """ Enrich the schedule with the digitisation step"""
1441  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1442 
1443  if self._options.gflash==True:
1444  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1445 
1446  if sequence == 'pdigi_valid':
1447  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1448 
1449  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1450  if self._options.inputEventContent=='':
1451  self._options.inputEventContent='REGEN'
1452  else:
1453  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1454 
1455 
1456  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1457  return
1458 
1459  def prepare_DIGIPREMIX(self, sequence = None):
1460  """ Enrich the schedule with the digitisation step"""
1461  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1462 
1463  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1464 
1465  if sequence == 'pdigi_valid':
1466  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1467  else:
1468  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1469 
1470  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1471  return
1472 
1473  def prepare_DIGIPREMIX_S2(self, sequence = None):
1474  """ Enrich the schedule with the digitisation step"""
1475  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1476 
1477  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1478 
1479 
1480  if sequence == 'pdigi_valid':
1481  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1482  else:
1483  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1484 
1485  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1486  return
1487 
1488  def prepare_CFWRITER(self, sequence = None):
1489  """ Enrich the schedule with the crossing frame writer step"""
1490  self.loadAndRemember(self.CFWRITERDefaultCFF)
1491  self.scheduleSequence('pcfw','cfwriter_step')
1492  return
1493 
1494  def prepare_DATAMIX(self, sequence = None):
1495  """ Enrich the schedule with the digitisation step"""
1496  self.loadAndRemember(self.DATAMIXDefaultCFF)
1497  self.scheduleSequence('pdatamix','datamixing_step')
1498 
1499  if self._options.pileup_input:
1500  theFiles=''
1501  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1502  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
1503  elif self._options.pileup_input.startswith("filelist:"):
1504  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1505  else:
1506  theFiles=self._options.pileup_input.split(',')
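# Forms accepted for --pileup_input, as parsed above (editor's illustration, dataset and
# file names are made up):
#   'das:/RelValMinBias/CMSSW_X_Y_Z/GEN-SIM'  -> file list obtained from a DAS query
#   'filelist:pu_files.txt'                   -> file list read from a local text file
#   'file:a.root,file:b.root'                 -> comma-separated list used as is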
1507  #print theFiles
1508  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1509 
1510  return
1511 
1512  def prepare_DIGI2RAW(self, sequence = None):
1513  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1514  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1515  if "DIGIPREMIX" in self.stepMap.keys():
1516  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1517  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1518 
1519  return
1520 
1521  def prepare_REPACK(self, sequence = None):
1522  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1523  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1524  return
1525 
1526  def prepare_L1(self, sequence = None):
1527  """ Enrich the schedule with the L1 simulation step"""
1528  assert(sequence == None)
1529  self.loadAndRemember(self.L1EMDefaultCFF)
1530  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1531  return
1532 
1533  def prepare_L1REPACK(self, sequence = None):
1534  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1535  supported = ['GT','GT1','GT2','GCTGT']
1536  if sequence in supported:
1537  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1538  if self._options.scenario == 'HeavyIons':
1539  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1540  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1541  else:
1542  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1543  raise Exception('unsupported feature')
1544 
1545 
1546  def prepare_HLT(self, sequence = None):
1547  """ Enrich the schedule with the HLT simulation step"""
1548  if not sequence:
1549  print "no specification of the hlt menu has been given, should never happen"
1550  raise Exception('no HLT sequence provided')
1551 
1552  if '@' in sequence:
1553  # case where HLT:@something was provided
1554  from Configuration.HLT.autoHLT import autoHLT
1555  key = sequence[1:]
1556  if key in autoHLT:
1557  sequence = autoHLT[key]
1558  else:
1559  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1560 
1561  if ',' in sequence:
1562  #case where HLT:something:something was provided
1563  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1564  optionsForHLT = {}
1565  if self._options.scenario == 'HeavyIons':
1566  optionsForHLT['type'] = 'HIon'
1567  else:
1568  optionsForHLT['type'] = 'GRun'
1569  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1570  if sequence == 'run,fromSource':
1571  if hasattr(self.process.source,'firstRun'):
1572  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1573  elif hasattr(self.process.source,'setRunNumber'):
1574  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1575  else:
1576  raise Exception('Cannot replace menu to load %s'%(sequence))
1577  else:
1578  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1579  else:
1580  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1581 
1582  if self._options.isMC:
1583  if self._options.fast:
1584  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1585  else:
1586  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1587 
1588  if self._options.name != 'HLT':
1589  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1590  self.additionalCommands.append('process = ProcessName(process)')
1591  self.additionalCommands.append('')
1592  from HLTrigger.Configuration.CustomConfigs import ProcessName
1593  self.process = ProcessName(self.process)
1594 
1595  self.schedule.append(self.process.HLTSchedule)
1596  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1597 
1598  #this is a fake, to be removed with fastsim migration and HLT menu dump
1599  if self._options.fast:
1600  if not hasattr(self.process,'HLTEndSequence'):
1601  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
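# Illustrative usage (editor's note): the step accepts an explicit menu name, an '@' key
# resolved through Configuration.HLT.autoHLT, or a comma-separated specification handled
# through process.loadHltConfiguration. The menu/key names below are assumptions that
# depend on the release:
#   cb.prepare_HLT('GRun')      # loads HLTrigger/Configuration/HLT_GRun_cff
#   cb.prepare_HLT('@relval')   # looked up in autoHLT, if such a key exists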
1602 
1603 
1604  def prepare_RAW2RECO(self, sequence = None):
1605  if ',' in sequence:
1606  seqReco=sequence.split(',')[1]
1607  seqDigi=sequence.split(',')[0]
1608  else:
1609  raise Exception("RAW2RECO requires two comma-separated specifications (RAW2DIGI,RECO); got "+str(sequence))
1610 
1611  self.prepare_RAW2DIGI(seqDigi)
1612  self.prepare_RECO(seqReco)
1613  return
1614 
1615  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1616  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1617  self.scheduleSequence(sequence,'raw2digi_step')
1618  # if self._options.isRepacked:
1619  #self.renameInputTagsInSequence(sequence)
1620  return
1621 
1622  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1623  ''' Enrich the schedule with L1 HW validation '''
1624  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1625  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1626  print '\n\n\n DEPRECATED this has no action \n\n\n'
1627  return
1628 
1629  def prepare_L1Reco(self, sequence = "L1Reco"):
1630  ''' Enrich the schedule with L1 reconstruction '''
1631  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1632  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1633  return
1634 
1635  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1636  ''' Enrich the schedule with the L1 track trigger '''
1637  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1638  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1639  return
1640 
1641  def prepare_FILTER(self, sequence = None):
1642  ''' Enrich the schedule with a user defined filter sequence '''
1643  ## load the relevant part
1644  filterConfig=self.load(sequence.split('.')[0])
1645  filterSeq=sequence.split('.')[-1]
1646  ## print it in the configuration
1647  class PrintAllModules(object):
1648  def __init__(self):
1649  self.inliner=''
1650  pass
1651  def enter(self,visitee):
1652  try:
1653  label=visitee.label()
1654  ##needs to be in reverse order
1655  self.inliner=label+','+self.inliner
1656  except:
1657  pass
1658  def leave(self,v): pass
1659 
1660  expander=PrintAllModules()
1661  getattr(self.process,filterSeq).visit( expander )
1662  self._options.inlineObjets+=','+expander.inliner
1663  self._options.inlineObjets+=','+filterSeq
1664 
1665  ## put the filtering path in the schedule
1666  self.scheduleSequence(filterSeq,'filtering_step')
1667  self.nextScheduleIsConditional=True
1668  ## put it before all the other paths
1669  self.productionFilterSequence = filterSeq
1670 
1671  return
1672 
1673  def prepare_RECO(self, sequence = "reconstruction"):
1674  ''' Enrich the schedule with reconstruction '''
1675  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1676  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1677  return
1678 
1679  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1680  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1681  if not self._options.fast:
1682  print "ERROR: this step is only implemented for FastSim"
1683  sys.exit()
1684  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1685  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1686  return
1687 
1688  def prepare_PAT(self, sequence = "miniAOD"):
1689  ''' Enrich the schedule with PAT '''
1690  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1691  if not self._options.runUnscheduled:
1692  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1693  if self._options.isData:
1694  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1695  else:
1696  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1697  if self._options.fast:
1698  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1699  return
1700 
1701  def prepare_EI(self, sequence = None):
1702  ''' Enrich the schedule with event interpretation '''
1703  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1704  if sequence in EventInterpretation:
1705  self.EIDefaultCFF = EventInterpretation[sequence]
1706  sequence = 'EIsequence'
1707  else:
1708  raise Exception('Cannot set %s event interpretation'%( sequence) )
1709  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1710  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1711  return
1712 
1713  def prepare_SKIM(self, sequence = "all"):
1714  ''' Enrich the schedule with skimming fragments'''
1715  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1716  sequence = sequence.split('.')[-1]
1717 
1718  skimlist=sequence.split('+')
1719  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1720  from Configuration.Skimming.autoSkim import autoSkim
1721  self.expandMapping(skimlist,autoSkim)
1722 
1723  #print "dictionary for skims:",skimConfig.__dict__
1724  for skim in skimConfig.__dict__:
1725  skimstream = getattr(skimConfig,skim)
1726  if isinstance(skimstream,cms.Path):
1727  #blacklist the skim paths so that they do not appear in the cfg
1728  self.blacklist_paths.append(skimstream)
1729  if (not isinstance(skimstream,cms.FilteredStream)):
1730  continue
1731  shortname = skim.replace('SKIMStream','')
1732  if (sequence=="all"):
1733  self.addExtraStream(skim,skimstream)
1734  elif (shortname in skimlist):
1735  self.addExtraStream(skim,skimstream)
1736  #add a DQM eventcontent for this guy
1737  if self._options.datatier=='DQM':
1738  self.process.load(self.EVTCONTDefaultCFF)
1739  skimstreamDQM = cms.FilteredStream(
1740  responsible = skimstream.responsible,
1741  name = skimstream.name+'DQM',
1742  paths = skimstream.paths,
1743  selectEvents = skimstream.selectEvents,
1744  content = self._options.datatier+'EventContent',
1745  dataTier = cms.untracked.string(self._options.datatier)
1746  )
1747  self.addExtraStream(skim+'DQM',skimstreamDQM)
1748  for i in range(skimlist.count(shortname)):
1749  skimlist.remove(shortname)
1750 
1751 
1752 
1753  if len(skimlist)!=0 and sequence!="all":
1754  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1755  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
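# Illustrative usage (editor's note; the skim names are assumptions): a step such as
# SKIM:LogError+@Mu first expands '@' keys through autoSkim, then books one extra
# FilteredStream output per requested skim, e.g.
#   cb.prepare_SKIM('LogError+@Mu')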
1756 
1757  def prepare_USER(self, sequence = None):
1758  ''' Enrich the schedule with a user defined sequence '''
1759  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1760  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1761  return
1762 
1763  def prepare_POSTRECO(self, sequence = None):
1764  """ Enrich the schedule with the postreco step """
1765  self.loadAndRemember(self.POSTRECODefaultCFF)
1766  self.scheduleSequence('postreco_generator','postreco_step')
1767  return
1768 
1769 
1770  def prepare_VALIDATION(self, sequence = 'validation'):
1771  print sequence,"in preparing validation"
1772  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1773  from Validation.Configuration.autoValidation import autoValidation
1774  #in case VALIDATION:something:somethingelse -> something,somethingelse
1775  sequence=sequence.split('.')[-1]
1776  if sequence.find(',')!=-1:
1777  prevalSeqName=sequence.split(',')[0].split('+')
1778  valSeqName=sequence.split(',')[1].split('+')
1779  self.expandMapping(prevalSeqName,autoValidation,index=0)
1780  self.expandMapping(valSeqName,autoValidation,index=1)
1781  else:
1782  if '@' in sequence:
1783  prevalSeqName=sequence.split('+')
1784  valSeqName=sequence.split('+')
1785  self.expandMapping(prevalSeqName,autoValidation,index=0)
1786  self.expandMapping(valSeqName,autoValidation,index=1)
1787  else:
1788  postfix=''
1789  if sequence:
1790  postfix='_'+sequence
1791  prevalSeqName=['prevalidation'+postfix]
1792  valSeqName=['validation'+postfix]
1793  if not hasattr(self.process,valSeqName[0]):
1794  prevalSeqName=['']
1795  valSeqName=[sequence]
1796 
1797  def NFI(index):
1798  ##name from index, required to keep backward compatibility
1799  if index==0:
1800  return ''
1801  else:
1802  return '%s'%index
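# e.g. NFI(0) -> ''  (keeps the historical names validation_step / prevalidation_step)
#      NFI(2) -> '2' (yields validation_step2 / prevalidation_step2)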
1803 
1804  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1805  if self._options.restoreRNDSeeds==False:
1806  self._options.restoreRNDSeeds=True
1807 
1808  #rename the HLT process in validation steps
1809  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1810  for s in valSeqName+prevalSeqName:
1811  if s:
1812  self.renameHLTprocessInSequence(s)
1813  for (i,s) in enumerate(prevalSeqName):
1814  if s:
1815  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1816  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1817 
1818  for (i,s) in enumerate(valSeqName):
1819  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1820  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1821 
1822  if not 'DIGI' in self.stepMap and not self._options.fast:
1823  self.executeAndRemember("process.mix.playback = True")
1824  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1825  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1826  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1827 
1828  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1829  #will get in the schedule, smoothly
1830  for (i,s) in enumerate(valSeqName):
1831  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1832 
1833  return
1834 
1835 
1836  class MassSearchReplaceProcessNameVisitor(object):
1837  """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value.
1838  It will climb down within PSets, VPSets and VInputTags to find its target"""
1839  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1840  self._paramReplace = paramReplace
1841  self._paramSearch = paramSearch
1842  self._verbose = verbose
1843  self._whitelist = whitelist
1844 
1845  def doIt(self,pset,base):
1846  if isinstance(pset, cms._Parameterizable):
1847  for name in pset.parameters_().keys():
1848  # skip whitelisted parameters
1849  if name in self._whitelist:
1850  continue
1851  # if I use pset.parameters_().items() I get copies of the parameter values
1852  # so I can't modify the nested pset
1853  value = getattr(pset,name)
1854  type = value.pythonTypeName()
1855  if type in ('cms.PSet', 'cms.untracked.PSet'):
1856  self.doIt(value,base+"."+name)
1857  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1858  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1859  elif type in ('cms.string', 'cms.untracked.string'):
1860  if value.value() == self._paramSearch:
1861  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1862  setattr(pset, name,self._paramReplace)
1863  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1864  for (i,n) in enumerate(value):
1865  if not isinstance(n, cms.InputTag):
1866  n=cms.InputTag(n)
1867  if n.processName == self._paramSearch:
1868  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1869  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1870  setattr(n,"processName",self._paramReplace)
1871  value[i]=n
1872  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1873  for (i,n) in enumerate(value):
1874  if n==self._paramSearch:
1875  getattr(pset,name)[i]=self._paramReplace
1876  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1877  if value.processName == self._paramSearch:
1878  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1879  setattr(getattr(pset, name),"processName",self._paramReplace)
1880 
1881  def enter(self,visitee):
1882  label = ''
1883  try:
1884  label = visitee.label()
1885  except AttributeError:
1886  label = '<Module not in a Process>'
1887  except:
1888  label = 'other exception'
1889  self.doIt(visitee, label)
1890 
1891  def leave(self,visitee):
1892  pass
1893 
1894  #visit a sequence to replace all input tags
1895  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1896  print "Replacing all InputTag %s => %s"%(oldT,newT)
1897  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1898  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1899  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1900  if not loadMe in self.additionalCommands:
1901  self.additionalCommands.append(loadMe)
1902  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1903 
1904  #change the process name used to address HLT results in any sequence
1905  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1906  if self._options.hltProcess:
1907  proc=self._options.hltProcess
1908  else:
1909  proc=self.process.name_()
1910  if proc==HLTprocess: return
1911  # visit all modules in the sequence and replace the HLT process name
1912  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1913  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1914  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1915  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1916  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1917 
1918 
1919  def expandMapping(self,seqList,mapping,index=None):
1920  maxLevel=20
1921  level=0
1922  while '@' in repr(seqList) and level<maxLevel:
1923  level+=1
1924  for specifiedCommand in seqList:
1925  if specifiedCommand.startswith('@'):
1926  location=specifiedCommand[1:]
1927  if not location in mapping:
1928  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1929  mappedTo=mapping[location]
1930  if index!=None:
1931  mappedTo=mappedTo[index]
1932  seqList.remove(specifiedCommand)
1933  seqList.extend(mappedTo.split('+'))
1934  break
1935  if level==maxLevel:
1936  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
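# Toy example of expandMapping (editor's note, the mapping is made up):
#   seqList = ['@common', 'extra']; mapping = {'common': 'a+b'}
#   expandMapping(seqList, mapping)   # seqList becomes ['extra', 'a', 'b']
# With index=N the mapped value is taken to be a tuple/list and only element N is used,
# as done for the prevalidation/validation sequences (index=0/1) and harvesting (index=-1).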
1937 
1938  def prepare_DQM(self, sequence = 'DQMOffline'):
1939  # this one needs replacement
1940 
1941  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1942  sequenceList=sequence.split('.')[-1].split('+')
1943  from DQMOffline.Configuration.autoDQM import autoDQM
1944  self.expandMapping(sequenceList,autoDQM,index=0)
1945 
1946  if len(set(sequenceList))!=len(sequenceList):
1947  sequenceList=list(set(sequenceList))
1948  print "Duplicate entries for DQM, using",sequenceList
1949  pathName='dqmoffline_step'
1950 
1951  for (i,sequence) in enumerate(sequenceList):
1952  if (i!=0):
1953  pathName='dqmoffline_%d_step'%(i)
1954 
1955  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1956  self.renameHLTprocessInSequence(sequence)
1957 
1958  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1959  if 'HLT' in self.stepMap.keys():
1960  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1961  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1962  else:
1963  # schedule DQM as a standard Path
1964  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1965  self.schedule.append(getattr(self.process,pathName))
1966 
1967 
1968  def prepare_HARVESTING(self, sequence = None):
1969  """ Enrich the process with harvesting step """
1970  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1971  self.loadAndRemember(self.DQMSaverCFF)
1972 
1973  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1974  sequence = sequence.split('.')[-1]
1975 
1976  # decide which HARVESTING paths to use
1977  harvestingList = sequence.split("+")
1978  from DQMOffline.Configuration.autoDQM import autoDQM
1979  from Validation.Configuration.autoValidation import autoValidation
1980  import copy
1981  combined_mapping = copy.deepcopy( autoDQM )
1982  combined_mapping.update( autoValidation )
1983  self.expandMapping(harvestingList,combined_mapping,index=-1)
1984 
1985  if len(set(harvestingList))!=len(harvestingList):
1986  harvestingList=list(set(harvestingList))
1987  print "Duplicate entries for HARVESTING, using",harvestingList
1988 
1989  for name in harvestingList:
1990  if not name in harvestingConfig.__dict__:
1991  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1992  continue
1993  harvestingstream = getattr(harvestingConfig,name)
1994  if isinstance(harvestingstream,cms.Path):
1995  self.schedule.append(harvestingstream)
1996  self.blacklist_paths.append(harvestingstream)
1997  if isinstance(harvestingstream,cms.Sequence):
1998  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1999  self.schedule.append(getattr(self.process,name+"_step"))
2000 
2001  self.scheduleSequence('DQMSaver','dqmsave_step')
2002  return
2003 
2004  def prepare_ALCAHARVEST(self, sequence = None):
2005  """ Enrich the process with AlCaHarvesting step """
2006  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2007  sequence=sequence.split(".")[-1]
2008 
2009  # decide which AlcaHARVESTING paths to use
2010  harvestingList = sequence.split("+")
2011 
2012 
2013 
2014  from Configuration.AlCa.autoPCL import autoPCL
2015  self.expandMapping(harvestingList,autoPCL)
2016 
2017  for name in harvestingConfig.__dict__:
2018  harvestingstream = getattr(harvestingConfig,name)
2019  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2020  self.schedule.append(harvestingstream)
2021  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2022  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2023  harvestingList.remove(name)
2024  # append the common part at the end of the sequence
2025  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2026  self.schedule.append(lastStep)
2027 
2028  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2029  print "The following harvesting sequences could not be found : ", harvestingList
2030  raise Exception("The following harvesting sequences could not be found : "+str(harvestingList))
2031 
2032 
2033 
2034  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2035  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2036  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2037  return
2038 
2039  def finalizeFastSimHLT(self):
2040  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2041  self.schedule.append(self.process.reconstruction)
2042 
2043 
2044  def build_production_info(self, evt_type, evtnumber):
2045  """ Add useful info for the production. """
2046  self.process.configurationMetadata=cms.untracked.PSet\
2047  (version=cms.untracked.string("$Revision: 1.19 $"),
2048  name=cms.untracked.string("Applications"),
2049  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2050  )
2051 
2052  self.addedObjects.append(("Production Info","configurationMetadata"))
2053 
2054 
2055  def prepare(self, doChecking = False):
2056  """ Prepare the configuration string and add missing pieces."""
2057 
2058  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2059  self.addMaxEvents()
2060  if self.with_input:
2061  self.addSource()
2062  self.addStandardSequences()
2063  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2064  self.completeInputCommand()
2065  self.addConditions()
2066 
2067 
2068  outputModuleCfgCode=""
2069  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2070  outputModuleCfgCode=self.addOutput()
2071 
2072  self.addCommon()
2073 
2074  self.pythonCfgCode = "# Auto generated configuration file\n"
2075  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2076  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2077  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2078  if hasattr(self._options,"era") and self._options.era :
2079  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2080  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2081  # Multiple eras can be specified in a comma separated list
2082  for requestedEra in self._options.era.split(",") :
2083  self.pythonCfgCode += ",eras."+requestedEra
2084  self.pythonCfgCode += ")\n\n" # end of the line
2085  else :
2086  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2087 
2088  self.pythonCfgCode += "# import of standard configurations\n"
2089  for module in self.imports:
2090  self.pythonCfgCode += ("process.load('"+module+"')\n")
2091 
2092  # production info
2093  if not hasattr(self.process,"configurationMetadata"):
2094  self.build_production_info(self._options.evt_type, self._options.number)
2095  else:
2096  #the PSet was added via a load
2097  self.addedObjects.append(("Production Info","configurationMetadata"))
2098 
2099  self.pythonCfgCode +="\n"
2100  for comment,object in self.addedObjects:
2101  if comment!="":
2102  self.pythonCfgCode += "\n# "+comment+"\n"
2103  self.pythonCfgCode += dumpPython(self.process,object)
2104 
2105  # dump the output definition
2106  self.pythonCfgCode += "\n# Output definition\n"
2107  self.pythonCfgCode += outputModuleCfgCode
2108 
2109  # dump all additional outputs (e.g. alca or skim streams)
2110  self.pythonCfgCode += "\n# Additional output definition\n"
2111  #sort the additional output keys to get a reproducible ordering
2112  nl=self.additionalOutputs.keys()
2113  nl.sort()
2114  for name in nl:
2115  output = self.additionalOutputs[name]
2116  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2117  tmpOut = cms.EndPath(output)
2118  setattr(self.process,name+'OutPath',tmpOut)
2119  self.schedule.append(tmpOut)
2120 
2121  # dump all additional commands
2122  self.pythonCfgCode += "\n# Other statements\n"
2123  for command in self.additionalCommands:
2124  self.pythonCfgCode += command + "\n"
2125 
2126  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2127  for object in self._options.inlineObjets.split(','):
2128  if not object:
2129  continue
2130  if not hasattr(self.process,object):
2131  print 'cannot inline -'+object+'- : not known'
2132  else:
2133  self.pythonCfgCode +='\n'
2134  self.pythonCfgCode +=dumpPython(self.process,object)
2135 
2136  # dump all paths
2137  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2138  for path in self.process.paths:
2139  if getattr(self.process,path) not in self.blacklist_paths:
2140  self.pythonCfgCode += dumpPython(self.process,path)
2141 
2142  for endpath in self.process.endpaths:
2143  if getattr(self.process,endpath) not in self.blacklist_paths:
2144  self.pythonCfgCode += dumpPython(self.process,endpath)
2145 
2146  # dump the schedule
2147  self.pythonCfgCode += "\n# Schedule definition\n"
2148  result = "process.schedule = cms.Schedule("
2149 
2150  # handling of the schedule
2151  self.process.schedule = cms.Schedule()
2152  for item in self.schedule:
2153  if not isinstance(item, cms.Schedule):
2154  self.process.schedule.append(item)
2155  else:
2156  self.process.schedule.extend(item)
2157 
2158  if hasattr(self.process,"HLTSchedule"):
2159  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2160  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2161  pathNames = ['process.'+p.label_() for p in beforeHLT]
2162  result += ','.join(pathNames)+')\n'
2163  result += 'process.schedule.extend(process.HLTSchedule)\n'
2164  pathNames = ['process.'+p.label_() for p in afterHLT]
2165  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2166  else:
2167  pathNames = ['process.'+p.label_() for p in self.schedule]
2168  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2169 
2170  self.pythonCfgCode += result
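# Without an HLTSchedule the rendered line looks roughly like (illustrative path names):
#   process.schedule = cms.Schedule(process.generation_step,process.simulation_step,process.endjob_step,process.RAWSIMoutput_step)
# With an HLTSchedule the paths before it are listed explicitly, then the schedule is
# extended with process.HLTSchedule and finally with the remaining paths.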
2171 
2172  if self._options.nThreads != "1":
2173  self.pythonCfgCode +="\n"
2174  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2175  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2176  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2177  #repacked version
2178  if self._options.isRepacked:
2179  self.pythonCfgCode +="\n"
2180  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2181  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2182  MassReplaceInputTag(self.process)
2183 
2184  # special treatment in case of production filter sequence 2/2
2185  if self.productionFilterSequence:
2186  self.pythonCfgCode +='# filter all paths with the production filter sequence\n'
2187  self.pythonCfgCode +='for path in process.paths:\n'
2188  if len(self.conditionalPaths):
2189  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2190  if len(self.excludedPaths):
2191  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2192  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2193  pfs = getattr(self.process,self.productionFilterSequence)
2194  for path in self.process.paths:
2195  if not path in self.conditionalPaths: continue
2196  if path in self.excludedPaths: continue
2197  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
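# The emitted configuration therefore ends with a loop equivalent to (editor's sketch,
# placeholders in angle brackets):
#   for path in process.paths:
#           if not path in <conditionalPaths>: continue   # only when conditional paths were recorded
#           if path in <excludedPaths>: continue          # only when excluded paths were recorded
#           getattr(process,path)._seq = process.<productionFilterSequence> * getattr(process,path)._seq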
2198 
2199 
2200  # dump customise fragment
2201  self.pythonCfgCode += self.addCustomise()
2202 
2203  if self._options.runUnscheduled:
2204  # prune and delete paths
2205  #the path blacklist is not supported at this point
2206  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2207  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2208  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2209 
2210  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2211  self.process=convertToUnscheduled(self.process)
2212 
2213  #now add the unscheduled stuff
2214  for module in self.importsUnsch:
2215  self.process.load(module)
2216  self.pythonCfgCode += ("process.load('"+module+"')\n")
2217 
2218  #and clean the unscheduled stuff
2219  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2220  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2221 
2222  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2223  self.process=cleanUnscheduled(self.process)
2224 
2225 
2226  self.pythonCfgCode += self.addCustomise(1)
2227 
2228 
2229  # make the .io file
2230 
2231  if self._options.io:
2232  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2233  if not self._options.io.endswith('.io'): self._options.io+='.io'
2234  io=open(self._options.io,'w')
2235  ioJson={}
2236  if hasattr(self.process.source,"fileNames"):
2237  if len(self.process.source.fileNames.value()):
2238  ioJson['primary']=self.process.source.fileNames.value()
2239  if hasattr(self.process.source,"secondaryFileNames"):
2240  if len(self.process.source.secondaryFileNames.value()):
2241  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2242  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2243  ioJson['pileup']=self._options.pileup_input[4:]
2244  for (o,om) in self.process.outputModules_().items():
2245  ioJson[o]=om.fileName.value()
2246  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2247  if self.productionFilterSequence:
2248  ioJson['filter']=self.productionFilterSequence
2249  import json
2250  io.write(json.dumps(ioJson))
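# Example of the resulting .io file (editor's illustration, all values are made up):
#   {"primary": ["/store/relval/sample/GEN-SIM/step1.root"],
#    "GT": "auto:run2_mc",
#    "RAWSIMoutput": "file:step1.root",
#    "filter": "ProductionFilterSequence"}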
2251  return
2252 