ConfigBuilder.py

1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
30 defaultOptions.harvesting= 'AtRunEnd'
31 defaultOptions.gflash = False
32 defaultOptions.number = -1
33 defaultOptions.number_out = None
34 defaultOptions.arguments = ""
35 defaultOptions.name = "NO NAME GIVEN"
36 defaultOptions.evt_type = ""
37 defaultOptions.filein = ""
38 defaultOptions.dasquery=""
39 defaultOptions.dasoption=""
40 defaultOptions.secondfilein = ""
41 defaultOptions.customisation_file = []
42 defaultOptions.customisation_file_unsch = []
43 defaultOptions.customise_commands = ""
44 defaultOptions.inline_custom=False
45 defaultOptions.particleTable = 'pythiapdt'
46 defaultOptions.particleTableList = ['pythiapdt','pdt']
47 defaultOptions.dirin = ''
48 defaultOptions.dirout = ''
49 defaultOptions.filetype = 'EDM'
50 defaultOptions.fileout = 'output.root'
51 defaultOptions.filtername = ''
52 defaultOptions.lazy_download = False
53 defaultOptions.custom_conditions = ''
54 defaultOptions.hltProcess = ''
55 defaultOptions.eventcontent = None
56 defaultOptions.datatier = None
57 defaultOptions.inlineEventContent = True
58 defaultOptions.inlineObjets =''
59 defaultOptions.hideGen=False
60 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
61 defaultOptions.beamspot=None
62 defaultOptions.outputDefinition =''
63 defaultOptions.inputCommands = None
64 defaultOptions.outputCommands = None
65 defaultOptions.inputEventContent = ''
66 defaultOptions.dropDescendant = False
67 defaultOptions.relval = None
68 defaultOptions.slhc = None
69 defaultOptions.profile = None
70 defaultOptions.isRepacked = False
71 defaultOptions.restoreRNDSeeds = False
72 defaultOptions.donotDropOnInput = ''
73 defaultOptions.python_filename =''
74 defaultOptions.io=None
75 defaultOptions.lumiToProcess=None
76 defaultOptions.fast=False
77 defaultOptions.runsAndWeightsForMC = None
78 defaultOptions.runsScenarioForMC = None
79 defaultOptions.runUnscheduled = False
80 defaultOptions.timeoutOutput = False
81 defaultOptions.nThreads = '1'
82 
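# Illustrative sketch of how these defaults can be copied and overridden to drive ConfigBuilder
# directly (cmsDriver.py normally fills an Options instance from its command line). All option
# values below are assumptions chosen for the example, and actually building the process
# requires a full CMSSW runtime environment.
def _exampleConfigBuilderUsage():
    import copy
    opts = copy.copy(defaultOptions)
    opts.name = 'EXAMPLE'
    opts.scenario = 'pp'          # not part of defaultOptions; define_Configs expects it
    opts.isMC = True
    opts.isData = False
    opts.step = 'GEN,SIM'
    opts.conditions = 'auto:run2_mc'
    opts.eventcontent = 'FEVTDEBUG'
    opts.datatier = 'GEN-SIM'
    opts.evt_type = 'SingleMuPt10_pythia8_cfi'
    return ConfigBuilder(opts, with_output=True, with_input=True)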
83 # some helper routines
84 def dumpPython(process,name):
85  theObject = getattr(process,name)
86  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
87  return "process."+name+" = " + theObject.dumpPython("process")
88  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90  else:
91  return "process."+name+" = " + theObject.dumpPython()+"\n"
92 def filesFromList(fileName,s=None):
93  import os
94  import FWCore.ParameterSet.Config as cms
95  prim=[]
96  sec=[]
97  for line in open(fileName,'r'):
98  if line.count(".root")>=2:
99  #two files solution...
100  entries=line.replace("\n","").split()
101  if not entries[0] in prim:
102  prim.append(entries[0])
103  if not entries[1] in sec:
104  sec.append(entries[1])
105  elif (line.find(".root")!=-1):
106  entry=line.replace("\n","")
107  if not entry in prim:
108  prim.append(entry)
109  if s:
110  if not hasattr(s,"fileNames"):
111  s.fileNames=cms.untracked.vstring(prim)
112  else:
113  s.fileNames.extend(prim)
114  if len(sec)!=0:
115  if not hasattr(s,"secondaryFileNames"):
116  s.secondaryFileNames=cms.untracked.vstring(sec)
117  else:
118  s.secondaryFileNames.extend(sec)
119  print "found files: ",prim
120  if len(prim)==0:
121  raise Exception("There are no files in input from the file list")
122  if len(sec)!=0:
123  print "found parent files:",sec
124  return (prim,sec)
125 
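# A minimal sketch of feeding filesFromList with a text file that lists one primary file per
# line, optionally followed by its parent (secondary) file on the same line. The file name and
# paths are illustrative assumptions.
def _exampleFilesFromList():
    # files.txt could look like:
    #   /store/data/Run1/primary_1.root /store/data/Run1/parent_1.root
    #   /store/data/Run1/primary_2.root
    source = cms.Source("PoolSource",
                        fileNames=cms.untracked.vstring(),
                        secondaryFileNames=cms.untracked.vstring())
    prim, sec = filesFromList('files.txt', source)  # also fills source.fileNames / secondaryFileNames
    return prim, sec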
126 def filesFromDASQuery(query,option="",s=None):
127  import os,time
128  import FWCore.ParameterSet.Config as cms
129  prim=[]
130  sec=[]
131  print "the query is",query
132  eC=5
133  count=0
134  while eC!=0 and count<3:
135  if count!=0:
136  print 'Sleeping, then retrying DAS'
137  time.sleep(100)
138  p = Popen('das_client.py %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
139  pipe=p.stdout.read()
140  tupleP = os.waitpid(p.pid, 0)
141  eC=tupleP[1]
142  count=count+1
143  if eC==0:
144  print "DAS succeeded after",count,"attempts",eC
145  else:
146  print "DAS failed 3 times - I give up"
147  for line in pipe.split('\n'):
148  if line.count(".root")>=2:
149  #two files solution...
150  entries=line.replace("\n","").split()
151  if not entries[0] in prim:
152  prim.append(entries[0])
153  if not entries[1] in sec:
154  sec.append(entries[1])
155  elif (line.find(".root")!=-1):
156  entry=line.replace("\n","")
157  if not entry in prim:
158  prim.append(entry)
159  if s:
160  if not hasattr(s,"fileNames"):
161  s.fileNames=cms.untracked.vstring(prim)
162  else:
163  s.fileNames.extend(prim)
164  if len(sec)!=0:
165  if not hasattr(s,"secondaryFileNames"):
166  s.secondaryFileNames=cms.untracked.vstring(sec)
167  else:
168  s.secondaryFileNames.extend(sec)
169  print "found files: ",prim
170  if len(sec)!=0:
171  print "found parent files:",sec
172  return (prim,sec)
173 
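# A minimal sketch of resolving input files through DAS, assuming das_client.py is available in
# the environment; the dataset name is a placeholder, not something referenced in this file.
def _exampleFilesFromDASQuery():
    source = cms.Source("PoolSource",
                        fileNames=cms.untracked.vstring(),
                        secondaryFileNames=cms.untracked.vstring())
    return filesFromDASQuery('file dataset = /SomePrimaryDataset/SomeEra-v1/RAW', '', source)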
174 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
175  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
176  for s in aProcess.paths_().keys():
177  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
178 
179 def anyOf(listOfKeys,dict,opt=None):
180  for k in listOfKeys:
181  if k in dict:
182  toReturn=dict[k]
183  dict.pop(k)
184  return toReturn
185  if opt!=None:
186  return opt
187  else:
188  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output options")
189 
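# A small sketch of how anyOf consumes the per-output dictionaries of the --output option: the
# first matching key wins and is popped from the dict, and the optional third argument acts as
# a fallback. The dictionary below is an illustrative assumption.
def _exampleAnyOf():
    outDef = {'tier': 'AODSIM', 'e': 'AODSIM', 'fn': 'out.root'}
    theTier = anyOf(['t', 'tier', 'dataTier'], outDef)               # -> 'AODSIM', key 'tier' removed
    theStream = anyOf(['e', 'ec', 'eventContent', 'streamType'], outDef, theTier)
    theLabel = anyOf(['l', 'mL', 'moduleLabel'], outDef, '')          # no such key -> falls back to ''
    return theTier, theStream, theLabel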
190 class ConfigBuilder(object):
191  """The main building routines """
192 
193  def __init__(self, options, process = None, with_output = False, with_input = False ):
194  """options taken from old cmsDriver and optparse """
195 
196  options.outfile_name = options.dirout+options.fileout
197 
198  self._options = options
199 
200  if self._options.isData and options.isMC:
201  raise Exception("ERROR: You may specify only --data or --mc, not both")
202  #if not self._options.conditions:
203  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
204 
205  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
206  if 'ENDJOB' in self._options.step:
207  if (hasattr(self._options,"outputDefinition") and \
208  self._options.outputDefinition != '' and \
209  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
210  (hasattr(self._options,"datatier") and \
211  self._options.datatier and \
212  'DQMIO' in self._options.datatier):
213  print "removing ENDJOB from steps since it is not compatible with the DQMIO dataTier"
214  self._options.step=self._options.step.replace(',ENDJOB','')
215 
216 
217 
218  # what steps are provided by this class?
219  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
220  self.stepMap={}
221  self.stepKeys=[]
222  for step in self._options.step.split(","):
223  if step=='': continue
224  stepParts = step.split(":")
225  stepName = stepParts[0]
226  if stepName not in stepList and not stepName.startswith('re'):
227  raise ValueError("Step "+stepName+" unknown")
228  if len(stepParts)==1:
229  self.stepMap[stepName]=""
230  elif len(stepParts)==2:
231  self.stepMap[stepName]=stepParts[1].split('+')
232  elif len(stepParts)==3:
233  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
234  else:
235  raise ValueError("Step definition "+step+" invalid")
236  self.stepKeys.append(stepName)
237 
238  #print "map of steps is:",self.stepMap
239 
240  self.with_output = with_output
241  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
242  self.with_output = False
243  self.with_input = with_input
244  if process == None:
245  self.process = cms.Process(self._options.name)
246  else:
247  self.process = process
248  self.imports = []
249  self.importsUnsch = []
250  self.define_Configs()
251  self.schedule = list()
252 
253  # we are doing two things here:
254  # creating a process to catch errors
255  # building the code to re-create the process
256 
257  self.additionalCommands = []
258  # TODO: maybe a list of objects to be dumped would help as well
259  self.blacklist_paths = []
260  self.addedObjects = []
261  self.additionalOutputs = {}
262 
263  self.productionFilterSequence = None
264  self.nextScheduleIsConditional=False
265  self.conditionalPaths=[]
266  self.excludedPaths=[]
267 
268  def profileOptions(self):
269  """
270  addIgProfService
271  Function to add the igprof profile service so that you can dump in the middle
272  of the run.
273  """
274  profileOpts = self._options.profile.split(':')
275  profilerStart = 1
276  profilerInterval = 100
277  profilerFormat = None
278  profilerJobFormat = None
279 
280  if len(profileOpts):
281  #type, given as first argument is unused here
282  profileOpts.pop(0)
283  if len(profileOpts):
284  startEvent = profileOpts.pop(0)
285  if not startEvent.isdigit():
286  raise Exception("%s is not a number" % startEvent)
287  profilerStart = int(startEvent)
288  if len(profileOpts):
289  eventInterval = profileOpts.pop(0)
290  if not eventInterval.isdigit():
291  raise Exception("%s is not a number" % eventInterval)
292  profilerInterval = int(eventInterval)
293  if len(profileOpts):
294  profilerFormat = profileOpts.pop(0)
295 
296 
297  if not profilerFormat:
298  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
299  self._options.step,
300  self._options.pileup,
301  self._options.conditions,
302  self._options.datatier,
303  self._options.profileTypeLabel)
304  if not profilerJobFormat and profilerFormat.endswith(".gz"):
305  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
306  elif not profilerJobFormat:
307  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
308 
309  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
310 
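# Worked example of the colon-separated string consumed above (values are illustrative; the
# leading field is the igprof mode and is discarded here):
#   --profile pp:101:100:myprofile.gz
#   -> profilerStart=101, profilerInterval=100, profilerFormat='myprofile.gz',
#      profilerJobFormat='myprofile_EndOfJob.gz'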
311  def load(self,includeFile):
312  includeFile = includeFile.replace('/','.')
313  self.process.load(includeFile)
314  return sys.modules[includeFile]
315 
316  def loadAndRemember(self, includeFile,unsch=0):
317  """helper routine to load am memorize imports"""
318  # we could make the imports a on-the-fly data method of the process instance itself
319  # not sure if the latter is a good idea
320  includeFile = includeFile.replace('/','.')
321  if unsch==0:
322  self.imports.append(includeFile)
323  self.process.load(includeFile)
324  return sys.modules[includeFile]
325  else:
326  self.importsUnsch.append(includeFile)
327  return 0#sys.modules[includeFile]
328 
329  def executeAndRemember(self, command):
330  """helper routine to remember replace statements"""
331  self.additionalCommands.append(command)
332  if not command.strip().startswith("#"):
333  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
334  import re
335  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
336  #exec(command.replace("process.","self.process."))
337 
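# Sketch of the intended use of executeAndRemember: the statement is executed on self.process
# right away and recorded in self.additionalCommands so it can be replayed in the dumped
# configuration. The parameter value below is an illustrative assumption:
#   self.executeAndRemember('process.MessageLogger.cerr.FwkReport.reportEvery = 100')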
338  def addCommon(self):
339  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
340  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
341  else:
342  self.process.options = cms.untracked.PSet( )
343 
344  if self._options.runUnscheduled:
345  self.process.options.allowUnscheduled=cms.untracked.bool(True)
346 
347  self.addedObjects.append(("","options"))
348 
349  if self._options.lazy_download:
350  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
351  stats = cms.untracked.bool(True),
352  enable = cms.untracked.bool(True),
353  cacheHint = cms.untracked.string("lazy-download"),
354  readHint = cms.untracked.string("read-ahead-buffered")
355  )
356  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
357 
358  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
359  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
360 
361  if self._options.profile:
362  (start, interval, eventFormat, jobFormat)=self.profileOptions()
363  self.process.IgProfService = cms.Service("IgProfService",
364  reportFirstEvent = cms.untracked.int32(start),
365  reportEventInterval = cms.untracked.int32(interval),
366  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
367  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
368  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
369 
370  def addMaxEvents(self):
371  """Here we decide how many evts will be processed"""
372  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
373  if self._options.number_out:
374  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
375  self.addedObjects.append(("","maxEvents"))
376 
377  def addSource(self):
378  """Here the source is built. Priority: file, generator"""
379  self.addedObjects.append(("Input source","source"))
380 
381  def filesFromOption(self):
382  for entry in self._options.filein.split(','):
383  print "entry",entry
384  if entry.startswith("filelist:"):
385  filesFromList(entry[9:],self.process.source)
386  elif entry.startswith("dbs:") or entry.startswith("das:"):
387  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
388  else:
389  self.process.source.fileNames.append(self._options.dirin+entry)
390  if self._options.secondfilein:
391  if not hasattr(self.process.source,"secondaryFileNames"):
392  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
393  for entry in self._options.secondfilein.split(','):
394  print "entry",entry
395  if entry.startswith("filelist:"):
396  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
397  elif entry.startswith("dbs:") or entry.startswith("das:"):
398  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
399  else:
400  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
401 
402  if self._options.filein or self._options.dasquery:
403  if self._options.filetype == "EDM":
404  self.process.source=cms.Source("PoolSource",
405  fileNames = cms.untracked.vstring(),
406  secondaryFileNames= cms.untracked.vstring())
407  filesFromOption(self)
408  elif self._options.filetype == "DAT":
409  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
410  filesFromOption(self)
411  elif self._options.filetype == "LHE":
412  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
413  if self._options.filein.startswith("lhe:"):
414  #list the article directory automatically
415  args=self._options.filein.split(':')
416  article=args[1]
417  print 'LHE input from article ',article
418  location='/store/lhe/'
419  import os
420  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
421  for line in textOfFiles:
422  for fileName in [x for x in line.split() if '.lhe' in x]:
423  self.process.source.fileNames.append(location+article+'/'+fileName)
424  if len(args)>2:
425  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
426  else:
427  filesFromOption(self)
428 
429 
430  elif self._options.filetype == "DQM":
431  self.process.source=cms.Source("DQMRootSource",
432  fileNames = cms.untracked.vstring())
433  filesFromOption(self)
434 
435  elif self._options.filetype == "DQMDAQ":
436  # FIXME: how to configure it if there are no input files specified?
437  self.process.source=cms.Source("DQMStreamerReader")
438 
439 
440  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
441  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
442 
443  if self._options.dasquery!='':
444  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
445  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
446 
447  ##drop LHEXMLStringProduct on input to save memory if appropriate
448  if 'GEN' in self.stepMap.keys():
449  if self._options.inputCommands:
450  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
451  else:
452  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
453 
454  if self.process.source and self._options.inputCommands:
455  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
456  for command in self._options.inputCommands.split(','):
457  # remove whitespace around the keep/drop statements
458  command = command.strip()
459  if command=='': continue
460  self.process.source.inputCommands.append(command)
461  if not self._options.dropDescendant:
462  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
463 
464  if self._options.lumiToProcess:
465  import FWCore.PythonUtilities.LumiList as LumiList
466  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
467 
468  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
469  if self.process.source is None:
470  self.process.source=cms.Source("EmptySource")
471 
472  # modify source in case of run-dependent MC
473  self.runsAndWeights=None
474  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
475  if not self._options.isMC :
476  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
477  if self._options.runsAndWeightsForMC:
478  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
479  else:
480  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
481  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
482  __import__(RunsAndWeights[self._options.runsScenarioForMC])
483  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
484  else:
485  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
486 
487  if self.runsAndWeights:
488  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
489  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
490  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
491  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
492 
493  return
494 
495  def addOutput(self):
496  """ Add output module to the process """
497  result=""
498  if self._options.outputDefinition:
499  if self._options.datatier:
500  print "--datatier & --eventcontent options ignored"
501 
502  #new output convention with a list of dict
503  outList = eval(self._options.outputDefinition)
504  for (id,outDefDict) in enumerate(outList):
505  outDefDictStr=outDefDict.__str__()
506  if not isinstance(outDefDict,dict):
507  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
508  #requires option: tier
509  theTier=anyOf(['t','tier','dataTier'],outDefDict)
510  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
511  ## event content
512  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
513  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
514  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
515  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
516  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
517  # module label has a particular role
518  if not theModuleLabel:
519  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
520  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
521  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
522  ]
523  for name in tryNames:
524  if not hasattr(self.process,name):
525  theModuleLabel=name
526  break
527  if not theModuleLabel:
528  raise Exception("cannot find a module label for specification: "+outDefDictStr)
529  if id==0:
530  defaultFileName=self._options.outfile_name
531  else:
532  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
533 
534  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
535  if not theFileName.endswith('.root'):
536  theFileName+='.root'
537 
538  if len(outDefDict.keys()):
539  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
540  if theStreamType=='DQMIO': theStreamType='DQM'
541  if theStreamType=='ALL':
542  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
543  else:
544  theEventContent = getattr(self.process, theStreamType+"EventContent")
545 
546 
547  addAlCaSelects=False
548  if theStreamType=='ALCARECO' and not theFilterName:
549  theFilterName='StreamALCACombined'
550  addAlCaSelects=True
551 
552  CppType='PoolOutputModule'
553  if self._options.timeoutOutput:
554  CppType='TimeoutPoolOutputModule'
555  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
556  output = cms.OutputModule(CppType,
557  theEventContent.clone(),
558  fileName = cms.untracked.string(theFileName),
559  dataset = cms.untracked.PSet(
560  dataTier = cms.untracked.string(theTier),
561  filterName = cms.untracked.string(theFilterName))
562  )
563  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
564  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
565  if not theSelectEvent and hasattr(self.process,'filtering_step'):
566  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
567  if theSelectEvent:
568  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
569 
570  if addAlCaSelects:
571  if not hasattr(output,'SelectEvents'):
572  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
573  for alca in self.AlCaPaths:
574  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
575 
576 
577  if hasattr(self.process,theModuleLabel):
578  raise Exception("the current process already has a module "+theModuleLabel+" defined")
579  #print "creating output module ",theModuleLabel
580  setattr(self.process,theModuleLabel,output)
581  outputModule=getattr(self.process,theModuleLabel)
582  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
583  path=getattr(self.process,theModuleLabel+'_step')
584  self.schedule.append(path)
585 
586  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
587  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
588  return label
589  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
590  if theExtraOutputCommands:
591  if not isinstance(theExtraOutputCommands,list):
592  raise Exception("extra output command in --output must be a list of strings")
593  if hasattr(self.process,theStreamType+"EventContent"):
594  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
595  else:
596  outputModule.outputCommands.extend(theExtraOutputCommands)
597 
598  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
599 
600  ##ends the --output options model
601  return result
602 
603  streamTypes=self._options.eventcontent.split(',')
604  tiers=self._options.datatier.split(',')
605  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
606  raise Exception("number of event content arguments does not match number of datatier arguments")
607 
608  # if the only step is alca we don't need to put in an output
609  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
610  return "\n"
611 
612  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
613  if streamType=='': continue
614  if streamType=='DQMIO': streamType='DQM'
615  theEventContent = getattr(self.process, streamType+"EventContent")
616  if i==0:
617  theFileName=self._options.outfile_name
618  theFilterName=self._options.filtername
619  else:
620  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
621  theFilterName=self._options.filtername
622  CppType='PoolOutputModule'
623  if self._options.timeoutOutput:
624  CppType='TimeoutPoolOutputModule'
625  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
626  output = cms.OutputModule(CppType,
627  theEventContent,
628  fileName = cms.untracked.string(theFileName),
629  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
630  filterName = cms.untracked.string(theFilterName)
631  )
632  )
633  if hasattr(self.process,"generation_step") and streamType!='LHE':
634  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
635  if hasattr(self.process,"filtering_step"):
636  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
637 
638  if streamType=='ALCARECO':
639  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
640 
641  if "MINIAOD" in streamType:
642  output.dropMetaData = cms.untracked.string('ALL')
643  output.fastCloning= cms.untracked.bool(False)
644  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
645 
646  outputModuleName=streamType+'output'
647  setattr(self.process,outputModuleName,output)
648  outputModule=getattr(self.process,outputModuleName)
649  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
650  path=getattr(self.process,outputModuleName+'_step')
651  self.schedule.append(path)
652 
653  if self._options.outputCommands and streamType!='DQM':
654  for evct in self._options.outputCommands.split(','):
655  if not evct: continue
656  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
657 
658  if not self._options.inlineEventContent:
659  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
660  return label
661  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
662 
663  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
664 
665  return result
666 
668  """
669  Add selected standard sequences to the process
670  """
671  # load the pile up file
672  if self._options.pileup:
673  pileupSpec=self._options.pileup.split(',')[0]
674 
675  # Does the requested pile-up scenario exist?
676  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
677  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
678  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
679  raise Exception(message)
680 
681  # Put mixing parameters in a dictionary
682  if '.' in pileupSpec:
683  mixingDict={'file':pileupSpec}
684  elif pileupSpec.startswith('file:'):
685  mixingDict={'file':pileupSpec[5:]}
686  else:
687  import copy
688  mixingDict=copy.copy(Mixing[pileupSpec])
689  if len(self._options.pileup.split(','))>1:
690  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
691 
692  # Load the pu cfg file corresponding to the requested pu scenario
693  if 'file:' in pileupSpec:
694  #the file is local
695  self.process.load(mixingDict['file'])
696  print "inlining mixing module configuration"
697  self._options.inlineObjets+=',mix'
698  else:
699  self.loadAndRemember(mixingDict['file'])
700 
701  mixingDict.pop('file')
702  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
703  if self._options.pileup_input:
704  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
705  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
706  else:
707  mixingDict['F']=self._options.pileup_input.split(',')
708  specialization=defineMixing(mixingDict)
709  for command in specialization:
710  self.executeAndRemember(command)
711  if len(mixingDict)!=0:
712  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
713 
714 
715  # load the geometry file
716  try:
717  if len(self.stepMap):
718  self.loadAndRemember(self.GeometryCFF)
719  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
720  self.loadAndRemember(self.SimGeometryCFF)
721  if self.geometryDBLabel:
722  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
723  except ImportError:
724  print "Geometry option",self._options.geometry,"unknown."
725  raise
726 
727  if len(self.stepMap):
728  self.loadAndRemember(self.magFieldCFF)
729 
730  for stepName in self.stepKeys:
731  stepSpec = self.stepMap[stepName]
732  print "Step:", stepName,"Spec:",stepSpec
733  if stepName.startswith('re'):
734  ##add the corresponding input content
735  if stepName[2:] not in self._options.donotDropOnInput:
736  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
737  stepName=stepName[2:]
738  if stepSpec=="":
739  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
740  elif type(stepSpec)==list:
741  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
742  elif type(stepSpec)==tuple:
743  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
744  else:
745  raise ValueError("Invalid step definition")
746 
747  if self._options.restoreRNDSeeds!=False:
748  #it is either True, or a process name
749  if self._options.restoreRNDSeeds==True:
750  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
751  else:
752  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
753  if self._options.inputEventContent or self._options.inputCommands:
754  if self._options.inputCommands:
755  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
756  else:
757  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
758 
759 
761  if self._options.inputEventContent:
762  import copy
763  def dropSecondDropStar(iec):
764  #drop all but the first occurrence of 'drop *' in the list
765  count=0
766  for item in iec:
767  if item=='drop *':
768  if count!=0:
769  iec.remove(item)
770  count+=1
771 
772 
773  ## allow comma separated input eventcontent
774  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
775  for evct in self._options.inputEventContent.split(','):
776  if evct=='': continue
777  theEventContent = getattr(self.process, evct+"EventContent")
778  if hasattr(theEventContent,'outputCommands'):
779  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
780  if hasattr(theEventContent,'inputCommands'):
781  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
782 
783  dropSecondDropStar(self.process.source.inputCommands)
784 
785  if not self._options.dropDescendant:
786  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
787 
788 
789  return
790 
791  def addConditions(self):
792  """Add conditions to the process"""
793  if not self._options.conditions: return
794 
795  if 'FrontierConditions_GlobalTag' in self._options.conditions:
796  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
797  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
798 
799  self.loadAndRemember(self.ConditionsDefaultCFF)
800  from Configuration.AlCa.GlobalTag import GlobalTag
801  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
802  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
803  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
804 
805  if self._options.slhc:
806  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
807 
808 
809  def addCustomise(self,unsch=0):
810  """Include the customise code """
811 
812  custOpt=[]
813  if unsch==0:
814  for c in self._options.customisation_file:
815  custOpt.extend(c.split(","))
816  else:
817  for c in self._options.customisation_file_unsch:
818  custOpt.extend(c.split(","))
819 
820  custMap=DictTypes.SortedKeysDict()
821  for opt in custOpt:
822  if opt=='': continue
823  if opt.count('.')>1:
824  raise Exception("more than one '.' in the specification: "+opt)
825  fileName=opt.split('.')[0]
826  if opt.count('.')==0: rest='customise'
827  else:
828  rest=opt.split('.')[1]
829  if rest=='py': rest='customise' #catch the case of --customise file.py
830 
831  if fileName in custMap:
832  custMap[fileName].extend(rest.split('+'))
833  else:
834  custMap[fileName]=rest.split('+')
835 
836  if len(custMap)==0:
837  final_snippet='\n'
838  else:
839  final_snippet='\n# customisation of the process.\n'
840 
841  allFcn=[]
842  for opt in custMap:
843  allFcn.extend(custMap[opt])
844  for fcn in allFcn:
845  if allFcn.count(fcn)!=1:
846  raise Exception("cannot specify "+fcn+" twice as a customisation method")
847 
848  for f in custMap:
849  # let python search for that package and do syntax checking at the same time
850  packageName = f.replace(".py","").replace("/",".")
851  __import__(packageName)
852  package = sys.modules[packageName]
853 
854  # now ask the package for its definition and pick .py instead of .pyc
855  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
856 
857  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
858  if self._options.inline_custom:
859  for line in file(customiseFile,'r'):
860  if "import FWCore.ParameterSet.Config" in line:
861  continue
862  final_snippet += line
863  else:
864  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
865  for fcn in custMap[f]:
866  print "customising the process with",fcn,"from",f
867  if not hasattr(package,fcn):
868  #bound to fail at run time
869  raise Exception("config "+f+" has no function "+fcn)
870  #execute the command
871  self.process=getattr(package,fcn)(self.process)
872  #and print it in the configuration
873  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
874  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
875 
876  if len(custMap)!=0:
877  final_snippet += '\n# End of customisation functions\n'
878 
879  ### now for a useful command
880  if unsch==1 or not self._options.runUnscheduled:
881  if self._options.customise_commands:
882  import string
883  final_snippet +='\n# Customisation from command line'
884  for com in self._options.customise_commands.split('\\n'):
885  com=string.lstrip(com)
886  self.executeAndRemember(com)
887  final_snippet +='\n'+com
888 
889  return final_snippet
890 
891  #----------------------------------------------------------------------------
892  # here the methods to define the python includes for each step or
893  # conditions
894  #----------------------------------------------------------------------------
895  def define_Configs(self):
896  if len(self.stepMap):
897  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
898  if self._options.particleTable not in defaultOptions.particleTableList:
899  print 'Invalid particle table provided. Options are:'
900  print defaultOptions.particleTable
901  sys.exit(-1)
902  else:
903  if len(self.stepMap):
904  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
905 
906  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
907 
908  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
909  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
910  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
911  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
912  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
913  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
914  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
915  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
916  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
917  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
918  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
919  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
920  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
921  self.EIDefaultCFF=None
922  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
923  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
924  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
925  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
926  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
927  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
928  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
929  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
930  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
931  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
932  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
933 
934  if "DATAMIX" in self.stepMap.keys():
935  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
936  if self._options.datamix == 'PreMix':
937  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
938  else:
939  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
940  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
941  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
942 
943  if "DIGIPREMIX" in self.stepMap.keys():
944  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
945 
946  self.ALCADefaultSeq=None
947  self.LHEDefaultSeq='externalLHEProducer'
948  self.GENDefaultSeq='pgen'
949  self.SIMDefaultSeq='psim'
950  self.DIGIDefaultSeq='pdigi'
951  self.DIGIPREMIXDefaultSeq='pdigi'
952  self.DIGIPREMIX_S2DefaultSeq='pdigi'
953  self.DATAMIXDefaultSeq=None
954  self.DIGI2RAWDefaultSeq='DigiToRaw'
955  self.HLTDefaultSeq='GRun'
956  self.L1DefaultSeq=None
957  self.L1REPACKDefaultSeq='GT'
958  self.HARVESTINGDefaultSeq=None
959  self.ALCAHARVESTDefaultSeq=None
960  self.CFWRITERDefaultSeq=None
961  self.RAW2DIGIDefaultSeq='RawToDigi'
962  self.L1RecoDefaultSeq='L1Reco'
963  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
964  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
965  self.RECODefaultSeq='reconstruction'
966  else:
967  self.RECODefaultSeq='reconstruction_fromRECO'
968 
969  self.EIDefaultSeq='top'
970  self.POSTRECODefaultSeq=None
971  self.L1HwValDefaultSeq='L1HwVal'
972  self.DQMDefaultSeq='DQMOffline'
973  self.VALIDATIONDefaultSeq=''
974  self.ENDJOBDefaultSeq='endOfProcess'
975  self.REPACKDefaultSeq='DigiToRawRepack'
976  self.PATDefaultSeq='miniAOD'
977 
978  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
979 
980  if not self._options.beamspot:
981  self._options.beamspot=VtxSmearedDefaultKey
982 
984  # if it's MC then change the raw2digi
984  if self._options.isMC==True:
985  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
986  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
987  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
988  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
989  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
990  else:
991  self._options.beamspot = None
992 
993  #patch for gen, due to backward incompatibility
994  if 'reGEN' in self.stepMap:
995  self.GENDefaultSeq='fixGenInfo'
996 
997  if self._options.scenario=='cosmics':
998  self._options.pileup='Cosmics'
999  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1000  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1001  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1002  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1003  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1004  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1005  if self._options.isMC==True:
1006  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1007  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1008  self.RECODefaultSeq='reconstructionCosmics'
1009  self.DQMDefaultSeq='DQMOfflineCosmics'
1010 
1011  if self._options.scenario=='HeavyIons':
1012  if not self._options.beamspot:
1013  self._options.beamspot=VtxSmearedHIDefaultKey
1014  self.HLTDefaultSeq = 'HIon'
1015  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1016  self.VALIDATIONDefaultSeq=''
1017  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1018  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1019  self.RECODefaultSeq='reconstructionHeavyIons'
1020  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1021  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1022  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1023  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1024  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1025  if self._options.isMC==True:
1026  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1027 
1028 
1029  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1030 
1031  self.USERDefaultSeq='user'
1032  self.USERDefaultCFF=None
1033 
1034  # the magnetic field
1035  if self._options.isData:
1036  if self._options.magField==defaultOptions.magField:
1037  print "magnetic field option forced to: AutoFromDBCurrent"
1038  self._options.magField='AutoFromDBCurrent'
1039  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1040  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1041 
1042  # the geometry
1043  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1044  self.geometryDBLabel=None
1045  simGeometry=''
1046  if self._options.fast:
1047  if 'start' in self._options.conditions.lower():
1048  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1049  else:
1050  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1051  else:
1052  def inGeometryKeys(opt):
1053  from Configuration.StandardSequences.GeometryConf import GeometryConf
1054  if opt in GeometryConf:
1055  return GeometryConf[opt]
1056  else:
1057  return opt
1058 
1059  geoms=self._options.geometry.split(',')
1060  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1061  if len(geoms)==2:
1062  #may specify the reco geometry
1063  if '/' in geoms[1] or '_cff' in geoms[1]:
1064  self.GeometryCFF=geoms[1]
1065  else:
1066  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1067 
1068  if (geoms[0].startswith('DB:')):
1069  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1070  self.geometryDBLabel=geoms[0][3:]
1071  print "with DB:"
1072  else:
1073  if '/' in geoms[0] or '_cff' in geoms[0]:
1074  self.SimGeometryCFF=geoms[0]
1075  else:
1076  simGeometry=geoms[0]
1077  if self._options.gflash==True:
1078  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1079  else:
1080  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1081 
1082  # synchronize the geometry configuration and the FullSimulation sequence to be used
1083  if simGeometry not in defaultOptions.geometryExtendedOptions:
1084  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1085 
1086  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1087  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1088  self._options.beamspot='NoSmear'
1089 
1090  # fastsim requires some changes to the default cff files and sequences
1091  if self._options.fast:
1092  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1093  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1094  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1095  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1096  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1097  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1098 
1099 
1100 
1101  # Mixing
1102  if self._options.pileup=='default':
1103  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1104  self._options.pileup=MixingDefaultKey
1105 
1106 
1107  #not driven by a default cff anymore
1108  if self._options.isData:
1109  self._options.pileup=None
1110 
1111  if self._options.slhc:
1112  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1113  if 'stdgeom' not in self._options.slhc:
1114  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1115  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1116  if self._options.pileup!=defaultOptions.pileup:
1117  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1118 
1119  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1120 
1121  # for alca, skims, etc
1122  def addExtraStream(self,name,stream,workflow='full'):
1123  # define output module and go from there
1124  output = cms.OutputModule("PoolOutputModule")
1125  if stream.selectEvents.parameters_().__len__()!=0:
1126  output.SelectEvents = stream.selectEvents
1127  else:
1128  output.SelectEvents = cms.untracked.PSet()
1129  output.SelectEvents.SelectEvents=cms.vstring()
1130  if isinstance(stream.paths,tuple):
1131  for path in stream.paths:
1132  output.SelectEvents.SelectEvents.append(path.label())
1133  else:
1134  output.SelectEvents.SelectEvents.append(stream.paths.label())
1135 
1136 
1137 
1138  if isinstance(stream.content,str):
1139  evtPset=getattr(self.process,stream.content)
1140  for p in evtPset.parameters_():
1141  setattr(output,p,getattr(evtPset,p))
1142  if not self._options.inlineEventContent:
1143  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1144  return label
1145  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1146  else:
1147  output.outputCommands = stream.content
1148 
1149 
1150  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1151 
1152  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1153  filterName = cms.untracked.string(stream.name))
1154 
1155  if self._options.filtername:
1156  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1157 
1158  #add an automatic flushing to limit memory consumption
1159  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1160 
1161  if workflow in ("producers","full"):
1162  if isinstance(stream.paths,tuple):
1163  for path in stream.paths:
1164  self.schedule.append(path)
1165  else:
1166  self.schedule.append(stream.paths)
1167 
1168 
1169  # in case of relvals we don't want to have additional outputs
1170  if (not self._options.relval) and workflow in ("full","output"):
1171  self.additionalOutputs[name] = output
1172  setattr(self.process,name,output)
1173 
1174  if workflow == 'output':
1175  # adjust the select events to the proper trigger results from previous process
1176  filterList = output.SelectEvents.SelectEvents
1177  for i, filter in enumerate(filterList):
1178  filterList[i] = filter+":"+self._options.triggerResultsProcess
1179 
1180  return output
1181 
1182  #----------------------------------------------------------------------------
1183  # here the methods to create the steps. Of course we are doing magic here ;)
1184  # prepare_STEPNAME modifies self.process and whatever else is needed.
1185  #----------------------------------------------------------------------------
1186 
1187  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1188  if ( len(sequence.split('.'))==1 ):
1189  l=self.loadAndRemember(defaultCFF,unsch)
1190  elif ( len(sequence.split('.'))==2 ):
1191  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1192  sequence=sequence.split('.')[1]
1193  else:
1194  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1195  print sequence,"not recognized"
1196  raise
1197  return l
1198 
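# Sketch of the two sequence spellings accepted above, as they arrive from the --step option
# (package, cff and sequence names are illustrative assumptions):
#   --step RECO                                    -> the step's default cff is loaded
#   --step RECO:MyPkg/MySubPkg/myReco_cff.mySeq    -> that cff is loaded and 'mySeq' is used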
1199  def scheduleSequence(self,seq,prefix,what='Path'):
1200  if '*' in seq:
1201  #create only one path with all sequences in it
1202  for i,s in enumerate(seq.split('*')):
1203  if i==0:
1204  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1205  else:
1206  p=getattr(self.process,prefix)
1207  p+=getattr(self.process, s)
1208  self.schedule.append(getattr(self.process,prefix))
1209  return
1210  else:
1211  #create as many path as many sequences
1212  if not '+' in seq:
1213  if self.nextScheduleIsConditional:
1214  self.conditionalPaths.append(prefix)
1215  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1216  self.schedule.append(getattr(self.process,prefix))
1217  else:
1218  for i,s in enumerate(seq.split('+')):
1219  sn=prefix+'%d'%(i)
1220  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1221  self.schedule.append(getattr(self.process,sn))
1222  return
1223 
1224  def scheduleSequenceAtEnd(self,seq,prefix):
1225  self.scheduleSequence(seq,prefix,what='EndPath')
1226  return
1227 
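# Sketch of the two composition operators handled by scheduleSequence (names are illustrative):
#   scheduleSequence('seqA+seqB', 'mystep')  -> two paths, process.mystep0 and process.mystep1
#   scheduleSequence('seqA*seqB', 'mystep')  -> one path, process.mystep, running both sequences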
1228  def prepare_ALCAPRODUCER(self, sequence = None):
1229  self.prepare_ALCA(sequence, workflow = "producers")
1230 
1231  def prepare_ALCAOUTPUT(self, sequence = None):
1232  self.prepare_ALCA(sequence, workflow = "output")
1233 
1234  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1235  """ Enrich the process with alca streams """
1236  print 'DL enriching',workflow,sequence
1237  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1238  sequence = sequence.split('.')[-1]
1239 
1240  # decide which ALCA paths to use
1241  alcaList = sequence.split("+")
1242  maxLevel=0
1243  from Configuration.AlCa.autoAlca import autoAlca
1244  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1245  self.expandMapping(alcaList,autoAlca)
1246  self.AlCaPaths=[]
1247  for name in alcaConfig.__dict__:
1248  alcastream = getattr(alcaConfig,name)
1249  shortName = name.replace('ALCARECOStream','')
1250  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1251  output = self.addExtraStream(name,alcastream, workflow = workflow)
1252  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1253  self.AlCaPaths.append(shortName)
1254  if 'DQM' in alcaList:
1255  if not self._options.inlineEventContent and hasattr(self.process,name):
1256  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1257  else:
1258  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1259 
1260  #rename the HLT process name in the alca modules
1261  if self._options.hltProcess or 'HLT' in self.stepMap:
1262  if isinstance(alcastream.paths,tuple):
1263  for path in alcastream.paths:
1264  self.renameHLTprocessInSequence(path.label())
1265  else:
1266  self.renameHLTprocessInSequence(alcastream.paths.label())
1267 
1268  for i in range(alcaList.count(shortName)):
1269  alcaList.remove(shortName)
1270 
1271  # DQM needs a special handling
1272  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1273  path = getattr(alcaConfig,name)
1274  self.schedule.append(path)
1275  alcaList.remove('DQM')
1276 
1277  if isinstance(alcastream,cms.Path):
1278  #blacklist the alca paths so that they do not appear in the cfg
1279  self.blacklist_paths.append(alcastream)
1280 
1281 
1282  if len(alcaList) != 0:
1283  available=[]
1284  for name in alcaConfig.__dict__:
1285  alcastream = getattr(alcaConfig,name)
1286  if isinstance(alcastream,cms.FilteredStream):
1287  available.append(name.replace('ALCARECOStream',''))
1288  print "The following alcas could not be found "+str(alcaList)
1289  print "available ",available
1290  #print "verify your configuration, ignoring for now"
1291  raise Exception("The following alcas could not be found "+str(alcaList))
1292 
1293  def prepare_LHE(self, sequence = None):
1294  #load the fragment
1295  ##make it loadable
1296  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1297  print "Loading lhe fragment from",loadFragment
1298  __import__(loadFragment)
1299  self.process.load(loadFragment)
1300  ##inline the modules
1301  self._options.inlineObjets+=','+sequence
1302 
1303  getattr(self.process,sequence).nEvents = int(self._options.number)
1304 
1305  #schedule it
1306  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1307  self.excludedPaths.append("lhe_step")
1308  self.schedule.append( self.process.lhe_step )
1309 
1310  def prepare_GEN(self, sequence = None):
1311  """ load the fragment of generator configuration """
1312  loadFailure=False
1313  #remove trailing .py
1314  #support old style .cfi by changing something.cfi into something_cfi
1315  #remove python/ from the name
1316  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1317  #standard location of fragments
1318  if not '/' in loadFragment:
1319  loadFragment='Configuration.Generator.'+loadFragment
1320  else:
1321  loadFragment=loadFragment.replace('/','.')
1322  try:
1323  print "Loading generator fragment from",loadFragment
1324  __import__(loadFragment)
1325  except:
1326  loadFailure=True
1327  #if self.process.source and self.process.source.type_()=='EmptySource':
1328  if not (self._options.filein or self._options.dasquery):
1329  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1330 
1331  if not loadFailure:
1332  generatorModule=sys.modules[loadFragment]
1333  genModules=generatorModule.__dict__
1334  #remove lhe producer module since this should have been
1335  #imported instead in the LHE step
1336  if self.LHEDefaultSeq in genModules:
1337  del genModules[self.LHEDefaultSeq]
1338 
1339  if self._options.hideGen:
1340  self.loadAndRemember(loadFragment)
1341  else:
1342  self.process.load(loadFragment)
1343  # expose the objects from that fragment to the configuration
1344  import FWCore.ParameterSet.Modules as cmstypes
1345  for name in genModules:
1346  theObject = getattr(generatorModule,name)
1347  if isinstance(theObject, cmstypes._Module):
1348  self._options.inlineObjets=name+','+self._options.inlineObjets
1349  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1350  self._options.inlineObjets+=','+name
1351 
1352  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1353  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1354  self.productionFilterSequence = 'ProductionFilterSequence'
1355  elif 'generator' in genModules:
1356  self.productionFilterSequence = 'generator'
1357 
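 # If the fragment defines a ProductionFilterSequence (or at least a 'generator' module), its label is
 # remembered here; prepare() later prepends that sequence to every conditional path so downstream
 # steps only run over events accepted by the generator-level filter.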
1358  """ Enrich the schedule with the rest of the generation step """
1359  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1360  genSeqName=sequence.split('.')[-1]
1361 
1362  if True:
1363  try:
1364  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1365  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1366  self.loadAndRemember(cffToBeLoaded)
1367  except ImportError:
1368  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1369 
1370  if self._options.scenario == 'HeavyIons':
1371  if self._options.pileup=='HiMixGEN':
1372  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1373  else:
1374  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1375 
1376  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1377  self.schedule.append(self.process.generation_step)
1378 
1379  #register to the genstepfilter the name of the path (static right now, but might evolve)
1380  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1381 
1382  if 'reGEN' in self.stepMap:
1383  #stop here
1384  return
1385 
1386  """ Enrich the schedule with the summary of the filter step """
1387  #the gen filter in the endpath
1388  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1389  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1390  return
1391 
1392  def prepare_SIM(self, sequence = None):
1393  """ Enrich the schedule with the simulation step"""
1394  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1395  if not self._options.fast:
1396  if self._options.gflash==True:
1397  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1398 
1399  if self._options.magField=='0T':
1400  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1401  else:
1402  if self._options.magField=='0T':
1403  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1404 
1405  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1406  return
1407 
1408  def prepare_DIGI(self, sequence = None):
1409  """ Enrich the schedule with the digitisation step"""
1410  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1411 
1412  if self._options.gflash==True:
1413  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1414 
1415  if sequence == 'pdigi_valid':
1416  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1417 
1418  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1419  if self._options.inputEventContent=='':
1420  self._options.inputEventContent='REGEN'
1421  else:
1422  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1423 
1424 
1425  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1426  return
1427 
1428  def prepare_DIGIPREMIX(self, sequence = None):
1429  """ Enrich the schedule with the digitisation step"""
1430  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1431 
1432  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1433 
1434  if sequence == 'pdigi_valid':
1435  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1436  else:
1437  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1438 
1439  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1440  return
1441 
1442  def prepare_DIGIPREMIX_S2(self, sequence = None):
1443  """ Enrich the schedule with the digitisation step"""
1444  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1445 
1446  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1447 
1448 
1449  if sequence == 'pdigi_valid':
1450  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1451  else:
1452  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1453 
1454  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1455  return
1456 
1457  def prepare_CFWRITER(self, sequence = None):
1458  """ Enrich the schedule with the crossing frame writer step"""
1459  self.loadAndRemember(self.CFWRITERDefaultCFF)
1460  self.scheduleSequence('pcfw','cfwriter_step')
1461  return
1462 
1463  def prepare_DATAMIX(self, sequence = None):
1464  """ Enrich the schedule with the digitisation step"""
1465  self.loadAndRemember(self.DATAMIXDefaultCFF)
1466  self.scheduleSequence('pdatamix','datamixing_step')
1467 
1468  if self._options.pileup_input:
1469  theFiles=''
1470  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1471  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1472  elif self._options.pileup_input.startswith("filelist:"):
1473  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1474  else:
1475  theFiles=self._options.pileup_input.split(',')
1476  #print theFiles
1477  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
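 # Illustrative --pileup_input values (hypothetical dataset and file names):
 #   'das:/RelValMinBias/SomeCampaign/GEN-SIM'  -> files resolved through filesFromDASQuery
 #   'filelist:pu_files.txt'                    -> file names read from a local text file
 #   '/store/a.root,/store/b.root'              -> comma-separated LFNs used as given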
1478 
1479  return
1480 
1481  def prepare_DIGI2RAW(self, sequence = None):
1482  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1483  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1484  if "DIGIPREMIX" in self.stepMap.keys():
1485  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1486  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1487 
1488  return
1489 
1490  def prepare_REPACK(self, sequence = None):
1491  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1492  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1493  return
1494 
1495  def prepare_L1(self, sequence = None):
1496  """ Enrich the schedule with the L1 simulation step"""
1497  assert(sequence == None)
1498  self.loadAndRemember(self.L1EMDefaultCFF)
1499  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1500  return
1501 
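 # L1REPACK reruns the L1 emulator on data unpacked from RAW and repacks the result; the ':<key>' part
 # of the step selects one of the supported SimL1EmulatorRepack_<key>_cff configurations listed below.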
1502  def prepare_L1REPACK(self, sequence = None):
1503  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1504  supported = ['GT','GT1','GT2','GCTGT','Full']
1505  if sequence in supported:
1506  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1507  if self._options.scenario == 'HeavyIons':
1508  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1509  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1510  else:
1511  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1512  raise Exception('unsupported feature')
1513 
1514 
1515  def prepare_HLT(self, sequence = None):
1516  """ Enrich the schedule with the HLT simulation step"""
1517  if not sequence:
1518  print "no specification of the hlt menu has been given, should never happen"
1519  raise Exception('no HLT sequence provided')
1520 
1521  if '@' in sequence:
1522  # case where HLT:@something was provided
1523  from Configuration.HLT.autoHLT import autoHLT
1524  key = sequence[1:]
1525  if key in autoHLT:
1526  sequence = autoHLT[key]
1527  else:
1528  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1529 
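 # Past this point any 'HLT:@alias' request has been resolved through autoHLT, so the code below only
 # sees a concrete menu name (loaded from HLTrigger/Configuration/HLT_<name>_cff) or a comma-separated
 # specification handled through process.loadHltConfiguration.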
1530  if ',' in sequence:
1531  #case where HLT:something:something was provided
1532  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1533  optionsForHLT = {}
1534  if self._options.scenario == 'HeavyIons':
1535  optionsForHLT['type'] = 'HIon'
1536  else:
1537  optionsForHLT['type'] = 'GRun'
1538  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1539  if sequence == 'run,fromSource':
1540  if hasattr(self.process.source,'firstRun'):
1541  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1542  elif hasattr(self.process.source,'setRunNumber'):
1543  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1544  else:
1545  raise Exception('Cannot replace menu to load %s'%(sequence))
1546  else:
1547  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1548  else:
1549  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1550 
1551  if self._options.isMC:
1552  if self._options.fast:
1553  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1554  else:
1555  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1556 
1557  if self._options.name != 'HLT':
1558  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1559  self.additionalCommands.append('process = ProcessName(process)')
1560  self.additionalCommands.append('')
1561  from HLTrigger.Configuration.CustomConfigs import ProcessName
1562  self.process = ProcessName(self.process)
1563 
1564  self.schedule.append(self.process.HLTSchedule)
1565  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
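 # The menu brings its own cms.Schedule (process.HLTSchedule): it is appended as a whole and its Paths
 # are blacklisted, so the dumped configuration references the HLT schedule object instead of
 # re-printing every single HLT path definition.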
1566 
1567  #this is a fake, to be removed with fastsim migration and HLT menu dump
1568  if self._options.fast:
1569  if not hasattr(self.process,'HLTEndSequence'):
1570  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1571 
1572 
1573  def prepare_RAW2RECO(self, sequence = None):
1574  if ',' in sequence:
1575  seqReco=sequence.split(',')[1]
1576  seqDigi=sequence.split(',')[0]
1577  else:
1578  print "RAW2RECO requires a comma-separated pair of sequences,",sequence,"is insufficient"
 raise Exception('RAW2RECO requires a comma-separated pair of sequences')
1579 
1580  self.prepare_RAW2DIGI(seqDigi)
1581  self.prepare_RECO(seqReco)
1582  return
1583 
1584  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1585  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1586  self.scheduleSequence(sequence,'raw2digi_step')
1587  # if self._options.isRepacked:
1588  #self.renameInputTagsInSequence(sequence)
1589  return
1590 
1591  def prepare_PATFILTER(self, sequence=None):
1592  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1593  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1594  for filt in allMetFilterPaths:
1595  self.schedule.append(getattr(self.process,'Flag_'+filt))
1596 
1597  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1598  ''' Enrich the schedule with L1 HW validation '''
1599  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1600  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1601  print '\n\n\n DEPRECATED this has no action \n\n\n'
1602  return
1603 
1604  def prepare_L1Reco(self, sequence = "L1Reco"):
1605  ''' Enrich the schedule with L1 reconstruction '''
1606  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1607  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1608  return
1609 
1610  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1611  ''' Enrich the schedule with the L1 track trigger '''
1612  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1613  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1614  return
1615 
1616  def prepare_FILTER(self, sequence = None):
1617  ''' Enrich the schedule with a user defined filter sequence '''
1618  ## load the relevant part
1619  filterConfig=self.load(sequence.split('.')[0])
1620  filterSeq=sequence.split('.')[-1]
1621  ## print it in the configuration
1622  class PrintAllModules(object):
1623  def __init__(self):
1624  self.inliner=''
1625  pass
1626  def enter(self,visitee):
1627  try:
1628  label=visitee.label()
1629  ##needs to be in reverse order
1630  self.inliner=label+','+self.inliner
1631  except:
1632  pass
1633  def leave(self,v): pass
1634 
1635  expander=PrintAllModules()
1636  getattr(self.process,filterSeq).visit( expander )
1637  self._options.inlineObjets+=','+expander.inliner
1638  self._options.inlineObjets+=','+filterSeq
1639 
1640  ## put the filtering path in the schedule
1641  self.scheduleSequence(filterSeq,'filtering_step')
1642  self.nextScheduleIsConditional=True
1643  ## put it before all the other paths
1644  self.productionFilterSequence = filterSeq
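 # From here on the user filter plays the same role as a generator filter: prepare() prepends it to
 # every conditional path (see the "production filter sequence 2/2" block near the end of prepare()).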
1645 
1646  return
1647 
1648  def prepare_RECO(self, sequence = "reconstruction"):
1649  ''' Enrich the schedule with reconstruction '''
1650  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1651  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1652  return
1653 
1654  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1655  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1656  if not self._options.fast:
1657  print "ERROR: this step is only implemented for FastSim"
1658  sys.exit()
1659  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1660  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1661  return
1662 
1663  def prepare_PAT(self, sequence = "miniAOD"):
1664  ''' Enrich the schedule with PAT '''
1665  self.prepare_PATFILTER(self)
1666  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1667  if not self._options.runUnscheduled:
1668  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1669  if self._options.isData:
1670  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1671  else:
1672  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1673  if self._options.fast:
1674  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1675  return
1676 
1677  def prepare_EI(self, sequence = None):
1678  ''' Enrich the schedule with event interpretation '''
1679  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1680  if sequence in EventInterpretation:
1681  self.EIDefaultCFF = EventInterpretation[sequence]
1682  sequence = 'EIsequence'
1683  else:
1684  raise Exception('Cannot set %s event interpretation'%( sequence) )
1685  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1686  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1687  return
1688 
1689  def prepare_SKIM(self, sequence = "all"):
1690  ''' Enrich the schedule with skimming fragments'''
1691  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1692  sequence = sequence.split('.')[-1]
1693 
1694  skimlist=sequence.split('+')
1695  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1696  from Configuration.Skimming.autoSkim import autoSkim
1697  self.expandMapping(skimlist,autoSkim)
1698 
1699  #print "dictionnary for skims:",skimConfig.__dict__
1700  for skim in skimConfig.__dict__:
1701  skimstream = getattr(skimConfig,skim)
1702  if isinstance(skimstream,cms.Path):
1703  #blacklist the skim paths so that they do not appear in the cfg
1704  self.blacklist_paths.append(skimstream)
1705  if (not isinstance(skimstream,cms.FilteredStream)):
1706  continue
1707  shortname = skim.replace('SKIMStream','')
1708  if (sequence=="all"):
1709  self.addExtraStream(skim,skimstream)
1710  elif (shortname in skimlist):
1711  self.addExtraStream(skim,skimstream)
1712  #add a DQM eventcontent for this guy
1713  if self._options.datatier=='DQM':
1714  self.process.load(self.EVTCONTDefaultCFF)
1715  skimstreamDQM = cms.FilteredStream(
1716  responsible = skimstream.responsible,
1717  name = skimstream.name+'DQM',
1718  paths = skimstream.paths,
1719  selectEvents = skimstream.selectEvents,
1720  content = self._options.datatier+'EventContent',
1721  dataTier = cms.untracked.string(self._options.datatier)
1722  )
1723  self.addExtraStream(skim+'DQM',skimstreamDQM)
1724  for i in range(skimlist.count(shortname)):
1725  skimlist.remove(shortname)
1726 
1727 
1728 
1729  if (len(skimlist)!=0 and sequence!="all"):
1730  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1731  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1732 
1733  def prepare_USER(self, sequence = None):
1734  ''' Enrich the schedule with a user defined sequence '''
1735  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1736  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1737  return
1738 
1739  def prepare_POSTRECO(self, sequence = None):
1740  """ Enrich the schedule with the postreco step """
1741  self.loadAndRemember(self.POSTRECODefaultCFF)
1742  self.scheduleSequence('postreco_generator','postreco_step')
1743  return
1744 
1745 
1746  def prepare_VALIDATION(self, sequence = 'validation'):
1747  print sequence,"in preparing validation"
1748  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1749  from Validation.Configuration.autoValidation import autoValidation
1750  #in case VALIDATION:something:somethingelse -> something,somethingelse
1751  sequence=sequence.split('.')[-1]
1752  if sequence.find(',')!=-1:
1753  prevalSeqName=sequence.split(',')[0].split('+')
1754  valSeqName=sequence.split(',')[1].split('+')
1755  self.expandMapping(prevalSeqName,autoValidation,index=0)
1756  self.expandMapping(valSeqName,autoValidation,index=1)
1757  else:
1758  if '@' in sequence:
1759  prevalSeqName=sequence.split('+')
1760  valSeqName=sequence.split('+')
1761  self.expandMapping(prevalSeqName,autoValidation,index=0)
1762  self.expandMapping(valSeqName,autoValidation,index=1)
1763  else:
1764  postfix=''
1765  if sequence:
1766  postfix='_'+sequence
1767  prevalSeqName=['prevalidation'+postfix]
1768  valSeqName=['validation'+postfix]
1769  if not hasattr(self.process,valSeqName[0]):
1770  prevalSeqName=['']
1771  valSeqName=[sequence]
1772 
1773  def NFI(index):
1774  ##name from index, required to keep backward compatibility
1775  if index==0:
1776  return ''
1777  else:
1778  return '%s'%index
1779 
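 # NFI ('name from index') keeps the historical step names: the first '+'-separated sequence maps to
 # 'prevalidation_step'/'validation_step', later ones to 'validation_step1', 'validation_step2', ...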
1780 
1781  #rename the HLT process in validation steps
1782  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1783  for s in valSeqName+prevalSeqName:
1784  if s:
1785  self.renameHLTprocessInSequence(s)
1786  for (i,s) in enumerate(prevalSeqName):
1787  if s:
1788  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1789  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1790 
1791  for (i,s) in enumerate(valSeqName):
1792  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1793  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1794 
1795  #needed in case the miniAODValidation sequence is run starting from AODSIM
1796  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1797  return
1798 
1799  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1800  if self._options.restoreRNDSeeds==False:
1801  self._options.restoreRNDSeeds=True
1802 
1803  if not 'DIGI' in self.stepMap and not self._options.fast:
1804  self.executeAndRemember("process.mix.playback = True")
1805  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1806  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1807  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1808 
1809  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1810  #will get in the schedule, smoothly
1811  for (i,s) in enumerate(valSeqName):
1812  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1813 
1814  return
1815 
1816 
1818  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1819  It will climb down within PSets, VPSets and VInputTags to find its target"""
1820  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1821  self._paramReplace = paramReplace
1822  self._paramSearch = paramSearch
1823  self._verbose = verbose
1824  self._whitelist = whitelist
1825 
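 # doIt() walks a parameterizable object recursively: it descends into (V)PSets and rewrites the
 # process name of any InputTag, VInputTag, string or vstring parameter matching _paramSearch,
 # skipping whitelisted parameter names such as "subSystemFolder".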
1826  def doIt(self,pset,base):
1827  if isinstance(pset, cms._Parameterizable):
1828  for name in pset.parameters_().keys():
1829  # skip whitelisted parameters
1830  if name in self._whitelist:
1831  continue
1832  # if I use pset.parameters_().items() I get copies of the parameter values
1833  # so I can't modify the nested pset
1834  value = getattr(pset,name)
1835  type = value.pythonTypeName()
1836  if type in ('cms.PSet', 'cms.untracked.PSet'):
1837  self.doIt(value,base+"."+name)
1838  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1839  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1840  elif type in ('cms.string', 'cms.untracked.string'):
1841  if value.value() == self._paramSearch:
1842  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1843  setattr(pset, name,self._paramReplace)
1844  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1845  for (i,n) in enumerate(value):
1846  if not isinstance(n, cms.InputTag):
1847  n=cms.InputTag(n)
1848  if n.processName == self._paramSearch:
1849  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1850  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1851  setattr(n,"processName",self._paramReplace)
1852  value[i]=n
1853  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1854  for (i,n) in enumerate(value):
1855  if n==self._paramSearch:
1856  getattr(pset,name)[i]=self._paramReplace
1857  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1858  if value.processName == self._paramSearch:
1859  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1860  setattr(getattr(pset, name),"processName",self._paramReplace)
1861 
1862  def enter(self,visitee):
1863  label = ''
1864  try:
1865  label = visitee.label()
1866  except AttributeError:
1867  label = '<Module not in a Process>'
1868  except:
1869  label = 'other exception'
1870  self.doIt(visitee, label)
1871 
1872  def leave(self,visitee):
1873  pass
1874 
1875  #visit a sequence to replace all input tags
1876  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1877  print "Replacing all InputTag %s => %s"%(oldT,newT)
1878  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1879  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1880  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1881  if not loadMe in self.additionalCommands:
1882  self.additionalCommands.append(loadMe)
1883  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1884 
1885  #change the process name used to address HLT results in any sequence
1886  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1887  if self._options.hltProcess:
1888  proc=self._options.hltProcess
1889  else:
1890  proc=self.process.name_()
1891  if proc==HLTprocess: return
1892  # look up all modules in the sequence
1893  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
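 # Example effect (hypothetical module): an InputTag such as cms.InputTag("TriggerResults","","HLT")
 # used inside 'sequence' gets its process name rewritten to 'proc', both on the live process (visit
 # below) and in the dumped configuration (command appended to additionalCommands).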
1894  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1895  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1896  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1897  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1898 
1899 
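 # Worked example (hypothetical mapping): with mapping={'MU':'PathA+PathB'} and seqList=['@MU','PathC'],
 # expandMapping rewrites seqList in place to ['PathC','PathA','PathB']; when 'index' is given, only
 # that element of a list/tuple value is used (e.g. prevalidation vs validation entries in autoValidation).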
1900  def expandMapping(self,seqList,mapping,index=None):
1901  maxLevel=20
1902  level=0
1903  while '@' in repr(seqList) and level<maxLevel:
1904  level+=1
1905  for specifiedCommand in seqList:
1906  if specifiedCommand.startswith('@'):
1907  location=specifiedCommand[1:]
1908  if not location in mapping:
1909  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1910  mappedTo=mapping[location]
1911  if index!=None:
1912  mappedTo=mappedTo[index]
1913  seqList.remove(specifiedCommand)
1914  seqList.extend(mappedTo.split('+'))
1915  break
1916  if level==maxLevel:
1917  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1918 
1919  def prepare_DQM(self, sequence = 'DQMOffline'):
1920  # this one needs replacement
1921 
1922  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1923  sequenceList=sequence.split('.')[-1].split('+')
1924  postSequenceList=sequence.split('.')[-1].split('+')
1925  from DQMOffline.Configuration.autoDQM import autoDQM
1926  self.expandMapping(sequenceList,autoDQM,index=0)
1927  self.expandMapping(postSequenceList,autoDQM,index=1)
1928 
1929  if len(set(sequenceList))!=len(sequenceList):
1930  sequenceList=list(set(sequenceList))
1931  print "Duplicate entries for DQM, using",sequenceList
1932 
1933  pathName='dqmoffline_step'
1934  for (i,sequence) in enumerate(sequenceList):
1935  if (i!=0):
1936  pathName='dqmoffline_%d_step'%(i)
1937 
1938  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1939  self.renameHLTprocessInSequence(sequence)
1940 
1941  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1942  if 'HLT' in self.stepMap.keys():
1943  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1944  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1945  else:
1946  # schedule DQM as a standard Path
1947  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1948  self.schedule.append(getattr(self.process,pathName))
1949 
1950  pathName='dqmofflineOnPAT_step'
1951  for (i,sequence) in enumerate(postSequenceList):
1952  if (i!=0):
1953  pathName='dqmofflineOnPAT_%d_step'%(i)
1954 
1955  # if both MINIAOD and DQM are run in the same process, schedule DQM in an EndPath
1956  if 'PAT' in self.stepMap.keys():
1957  # need to put DQM in an EndPath, to access the miniAOD filter results
1958  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1959  else:
1960  # schedule DQM as a standard Path
1961  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1962  self.schedule.append(getattr(self.process,pathName))
1963 
1964  def prepare_HARVESTING(self, sequence = None):
1965  """ Enrich the process with harvesting step """
1966  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1967  self.loadAndRemember(self.DQMSaverCFF)
1968 
1969  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1970  sequence = sequence.split('.')[-1]
1971 
1972  # decide which HARVESTING paths to use
1973  harvestingList = sequence.split("+")
1974  from DQMOffline.Configuration.autoDQM import autoDQM
1975  from Validation.Configuration.autoValidation import autoValidation
1976  import copy
1977  combined_mapping = copy.deepcopy( autoDQM )
1978  combined_mapping.update( autoValidation )
1979  self.expandMapping(harvestingList,combined_mapping,index=-1)
1980 
1981  if len(set(harvestingList))!=len(harvestingList):
1982  harvestingList=list(set(harvestingList))
1983  print "Duplicate entries for HARVESTING, using",harvestingList
1984 
1985  for name in harvestingList:
1986  if not name in harvestingConfig.__dict__:
1987  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1988  continue
1989  harvestingstream = getattr(harvestingConfig,name)
1990  if isinstance(harvestingstream,cms.Path):
1991  self.schedule.append(harvestingstream)
1992  self.blacklist_paths.append(harvestingstream)
1993  if isinstance(harvestingstream,cms.Sequence):
1994  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1995  self.schedule.append(getattr(self.process,name+"_step"))
1996 
1997  self.scheduleSequence('DQMSaver','dqmsave_step')
1998  return
1999 
2000  def prepare_ALCAHARVEST(self, sequence = None):
2001  """ Enrich the process with AlCaHarvesting step """
2002  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2003  sequence=sequence.split(".")[-1]
2004 
2005  # decide which AlcaHARVESTING paths to use
2006  harvestingList = sequence.split("+")
2007 
2008 
2009 
2010  from Configuration.AlCa.autoPCL import autoPCL
2011  self.expandMapping(harvestingList,autoPCL)
2012 
2013  for name in harvestingConfig.__dict__:
2014  harvestingstream = getattr(harvestingConfig,name)
2015  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2016  self.schedule.append(harvestingstream)
2017  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2018  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2019  harvestingList.remove(name)
2020  # append the common part at the end of the sequence
2021  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2022  self.schedule.append(lastStep)
2023 
2024  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2025  print "The following harvesting could not be found : ", harvestingList
2026  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2027 
2028 
2029 
2030  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2031  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2032  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2033  return
2034 
2035  def finalizeFastSimHLT(self):
2036  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2037  self.schedule.append(self.process.reconstruction)
2038 
2039 
2040  def build_production_info(self, evt_type, evtnumber):
2041  """ Add useful info for the production. """
2042  self.process.configurationMetadata=cms.untracked.PSet\
2043  (version=cms.untracked.string("$Revision: 1.19 $"),
2044  name=cms.untracked.string("Applications"),
2045  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2046  )
2047 
2048  self.addedObjects.append(("Production Info","configurationMetadata"))
2049 
2050 
2051  def prepare(self, doChecking = False):
2052  """ Prepare the configuration string and add missing pieces."""
2053 
2054  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2055  self.addMaxEvents()
2056  if self.with_input:
2057  self.addSource()
2058  self.addStandardSequences()
2059  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2060  self.completeInputCommand()
2061  self.addConditions()
2062 
2063 
2064  outputModuleCfgCode=""
2065  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2066  outputModuleCfgCode=self.addOutput()
2067 
2068  self.addCommon()
2069 
2070  self.pythonCfgCode = "# Auto generated configuration file\n"
2071  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2072  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2073  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2074  if hasattr(self._options,"era") and self._options.era :
2075  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2076  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2077  # Multiple eras can be specified in a comma-separated list
2078  for requestedEra in self._options.era.split(",") :
2079  self.pythonCfgCode += ",eras."+requestedEra
2080  self.pythonCfgCode += ")\n\n" # end of the line
2081  else :
2082  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2083 
2084  self.pythonCfgCode += "# import of standard configurations\n"
2085  for module in self.imports:
2086  self.pythonCfgCode += ("process.load('"+module+"')\n")
2087 
2088  # production info
2089  if not hasattr(self.process,"configurationMetadata"):
2090  self.build_production_info(self._options.evt_type, self._options.number)
2091  else:
2092  #the PSet was added via a load
2093  self.addedObjects.append(("Production Info","configurationMetadata"))
2094 
2095  self.pythonCfgCode +="\n"
2096  for comment,object in self.addedObjects:
2097  if comment!="":
2098  self.pythonCfgCode += "\n# "+comment+"\n"
2099  self.pythonCfgCode += dumpPython(self.process,object)
2100 
2101  # dump the output definition
2102  self.pythonCfgCode += "\n# Output definition\n"
2103  self.pythonCfgCode += outputModuleCfgCode
2104 
2105  # dump all additional outputs (e.g. alca or skim streams)
2106  self.pythonCfgCode += "\n# Additional output definition\n"
2107  #I do not understand why the keys are not normally ordered.
2108  nl=self.additionalOutputs.keys()
2109  nl.sort()
2110  for name in nl:
2111  output = self.additionalOutputs[name]
2112  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2113  tmpOut = cms.EndPath(output)
2114  setattr(self.process,name+'OutPath',tmpOut)
2115  self.schedule.append(tmpOut)
2116 
2117  # dump all additional commands
2118  self.pythonCfgCode += "\n# Other statements\n"
2119  for command in self.additionalCommands:
2120  self.pythonCfgCode += command + "\n"
2121 
2122  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2123  for object in self._options.inlineObjets.split(','):
2124  if not object:
2125  continue
2126  if not hasattr(self.process,object):
2127  print 'cannot inline -'+object+'- : not known'
2128  else:
2129  self.pythonCfgCode +='\n'
2130  self.pythonCfgCode +=dumpPython(self.process,object)
2131 
2132  # dump all paths
2133  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2134  for path in self.process.paths:
2135  if getattr(self.process,path) not in self.blacklist_paths:
2136  self.pythonCfgCode += dumpPython(self.process,path)
2137 
2138  for endpath in self.process.endpaths:
2139  if getattr(self.process,endpath) not in self.blacklist_paths:
2140  self.pythonCfgCode += dumpPython(self.process,endpath)
2141 
2142  # dump the schedule
2143  self.pythonCfgCode += "\n# Schedule definition\n"
2144  result = "process.schedule = cms.Schedule("
2145 
2146  # handling of the schedule
2147  self.process.schedule = cms.Schedule()
2148  for item in self.schedule:
2149  if not isinstance(item, cms.Schedule):
2150  self.process.schedule.append(item)
2151  else:
2152  self.process.schedule.extend(item)
2153 
2154  if hasattr(self.process,"HLTSchedule"):
2155  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2156  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2157  pathNames = ['process.'+p.label_() for p in beforeHLT]
2158  result += ','.join(pathNames)+')\n'
2159  result += 'process.schedule.extend(process.HLTSchedule)\n'
2160  pathNames = ['process.'+p.label_() for p in afterHLT]
2161  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2162  else:
2163  pathNames = ['process.'+p.label_() for p in self.schedule]
2164  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2165 
2166  self.pythonCfgCode += result
2167 
2168  if self._options.nThreads != "1":
2169  self.pythonCfgCode +="\n"
2170  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2171  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2172  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2173  #repacked version
2174  if self._options.isRepacked:
2175  self.pythonCfgCode +="\n"
2176  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2177  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2178  MassReplaceInputTag(self.process)
2179 
2180  # special treatment in case of production filter sequence 2/2
2181  if self.productionFilterSequence:
2182  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2183  self.pythonCfgCode +='for path in process.paths:\n'
2184  if len(self.conditionalPaths):
2185  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2186  if len(self.excludedPaths):
2187  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2188  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2189  pfs = getattr(self.process,self.productionFilterSequence)
2190  for path in self.process.paths:
2191  if not path in self.conditionalPaths: continue
2192  if path in self.excludedPaths: continue
2193  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
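 # The production filter is prepended twice on purpose: once as python code in the dump (loop emitted
 # above) and once on the live process object, so the returned process and the written cfg stay in sync.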
2194 
2195 
2196  # dump customise fragment
2197  self.pythonCfgCode += self.addCustomise()
2198 
2199  if self._options.runUnscheduled:
2200  # prune and delete paths
2201  #this is not supporting the blacklist at this point since I do not understand it
2202  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2203  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2204  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2205 
2206  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2207  self.process=convertToUnscheduled(self.process)
2208 
2209  #now add the unscheduled stuff
2210  for module in self.importsUnsch:
2211  self.process.load(module)
2212  self.pythonCfgCode += ("process.load('"+module+"')\n")
2213 
2214  #and clean the unscheduled stuff
2215  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2216  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2217 
2218  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2219  self.process=cleanUnscheduled(self.process)
2220 
2221 
2222  self.pythonCfgCode += self.addCustomise(1)
2223 
2224 
2225  # make the .io file
2226 
2227  if self._options.io:
2228  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2229  if not self._options.io.endswith('.io'): self._options.io+='.io'
2230  io=open(self._options.io,'w')
2231  ioJson={}
2232  if hasattr(self.process.source,"fileNames"):
2233  if len(self.process.source.fileNames.value()):
2234  ioJson['primary']=self.process.source.fileNames.value()
2235  if hasattr(self.process.source,"secondaryFileNames"):
2236  if len(self.process.source.secondaryFileNames.value()):
2237  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2238  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2239  ioJson['pileup']=self._options.pileup_input[4:]
2240  for (o,om) in self.process.outputModules_().items():
2241  ioJson[o]=om.fileName.value()
2242  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2243  if self.productionFilterSequence:
2244  ioJson['filter']=self.productionFilterSequence
2245  import json
2246  io.write(json.dumps(ioJson))
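 # Illustrative .io content (hypothetical names and values):
 #   {"primary": ["/store/input.root"], "GT": "SOME_GLOBAL_TAG", "RECOSIMoutput": "output.root"}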
2247  return
2248 