ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
30 defaultOptions.harvesting= 'AtRunEnd'
31 defaultOptions.gflash = False
32 defaultOptions.number = -1
33 defaultOptions.number_out = None
34 defaultOptions.arguments = ""
35 defaultOptions.name = "NO NAME GIVEN"
36 defaultOptions.evt_type = ""
37 defaultOptions.filein = ""
38 defaultOptions.dasquery=""
39 defaultOptions.dasoption=""
40 defaultOptions.secondfilein = ""
41 defaultOptions.customisation_file = []
42 defaultOptions.customisation_file_unsch = []
43 defaultOptions.customise_commands = ""
44 defaultOptions.inline_custom=False
45 defaultOptions.particleTable = 'pythiapdt'
46 defaultOptions.particleTableList = ['pythiapdt','pdt']
47 defaultOptions.dirin = ''
48 defaultOptions.dirout = ''
49 defaultOptions.filetype = 'EDM'
50 defaultOptions.fileout = 'output.root'
51 defaultOptions.filtername = ''
52 defaultOptions.lazy_download = False
53 defaultOptions.custom_conditions = ''
54 defaultOptions.hltProcess = ''
55 defaultOptions.eventcontent = None
56 defaultOptions.datatier = None
57 defaultOptions.inlineEventContent = True
58 defaultOptions.inlineObjets =''
59 defaultOptions.hideGen=False
60 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
61 defaultOptions.beamspot=None
62 defaultOptions.outputDefinition =''
63 defaultOptions.inputCommands = None
64 defaultOptions.outputCommands = None
65 defaultOptions.inputEventContent = ''
66 defaultOptions.dropDescendant = False
67 defaultOptions.relval = None
68 defaultOptions.slhc = None
69 defaultOptions.profile = None
70 defaultOptions.isRepacked = False
71 defaultOptions.restoreRNDSeeds = False
72 defaultOptions.donotDropOnInput = ''
73 defaultOptions.python_filename =''
74 defaultOptions.io=None
75 defaultOptions.lumiToProcess=None
76 defaultOptions.fast=False
77 defaultOptions.runsAndWeightsForMC = None
78 defaultOptions.runsScenarioForMC = None
79 defaultOptions.runUnscheduled = False
80 defaultOptions.timeoutOutput = False
81 defaultOptions.nThreads = '1'
82 
83 # some helper routines
84 def dumpPython(process,name):
85  theObject = getattr(process,name)
86  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
87  return "process."+name+" = " + theObject.dumpPython("process")
88  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90  else:
91  return "process."+name+" = " + theObject.dumpPython()+"\n"
92 def filesFromList(fileName,s=None):
93  import os
94  import FWCore.ParameterSet.Config as cms
95  prim=[]
96  sec=[]
97  for line in open(fileName,'r'):
98  if line.count(".root")>=2:
99  #two files solution...
100  entries=line.replace("\n","").split()
101  if not entries[0] in prim:
102  prim.append(entries[0])
103  if not entries[1] in sec:
104  sec.append(entries[1])
105  elif (line.find(".root")!=-1):
106  entry=line.replace("\n","")
107  if not entry in prim:
108  prim.append(entry)
109  if s:
110  if not hasattr(s,"fileNames"):
111  s.fileNames=cms.untracked.vstring(prim)
112  else:
113  s.fileNames.extend(prim)
114  if len(sec)!=0:
115  if not hasattr(s,"secondaryFileNames"):
116  s.secondaryFileNames=cms.untracked.vstring(sec)
117  else:
118  s.secondaryFileNames.extend(sec)
119  print "found files: ",prim
120  if len(prim)==0:
121  raise Exception("There are no files in input from the file list")
122  if len(sec)!=0:
123  print "found parent files:",sec
124  return (prim,sec)
125 
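# A minimal standalone sketch of the file-list format consumed by filesFromList above,
# assuming the same conventions: one ROOT file per line gives a primary file, two ROOT
# files per line give a primary file plus its parent (secondary) file. The sample paths
# below are hypothetical.
def parse_file_list(lines):
    # mirrors the parsing in filesFromList, without the cms.Source bookkeeping
    prim, sec = [], []
    for line in lines:
        if line.count(".root") >= 2:
            first, second = line.split()[:2]
            if first not in prim:
                prim.append(first)
            if second not in sec:
                sec.append(second)
        elif ".root" in line:
            entry = line.strip()
            if entry not in prim:
                prim.append(entry)
    return prim, sec

example_lines = [
    "/store/data/A.root /store/data/A_parent.root",
    "/store/data/B.root",
]
assert parse_file_list(example_lines) == (
    ["/store/data/A.root", "/store/data/B.root"],
    ["/store/data/A_parent.root"],
)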
126 def filesFromDASQuery(query,option="",s=None):
127  import os,time
128  import FWCore.ParameterSet.Config as cms
129  prim=[]
130  sec=[]
131  print "the query is",query
132  eC=5
133  count=0
134  while eC!=0 and count<3:
135  if count!=0:
136  print 'Sleeping, then retrying DAS'
137  time.sleep(100)
138  p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
139  pipe=p.stdout.read()
140  tupleP = os.waitpid(p.pid, 0)
141  eC=tupleP[1]
142  count=count+1
143  if eC==0:
144  print "DAS succeeded after",count,"attempts",eC
145  else:
146  print "DAS failed 3 times- I give up"
147  for line in pipe.split('\n'):
148  if line.count(".root")>=2:
149  #two files solution...
150  entries=line.replace("\n","").split()
151  if not entries[0] in prim:
152  prim.append(entries[0])
153  if not entries[1] in sec:
154  sec.append(entries[1])
155  elif (line.find(".root")!=-1):
156  entry=line.replace("\n","")
157  if not entry in prim:
158  prim.append(entry)
159  if s:
160  if not hasattr(s,"fileNames"):
161  s.fileNames=cms.untracked.vstring(prim)
162  else:
163  s.fileNames.extend(prim)
164  if len(sec)!=0:
165  if not hasattr(s,"secondaryFileNames"):
166  s.secondaryFileNames=cms.untracked.vstring(sec)
167  else:
168  s.secondaryFileNames.extend(sec)
169  print "found files: ",prim
170  if len(sec)!=0:
171  print "found parent files:",sec
172  return (prim,sec)
173 
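# filesFromDASQuery above wraps the external das_client call in a retry loop: up to three
# attempts, sleeping between them, keyed on the process exit code. A generic sketch of that
# retry-with-sleep pattern; flaky_call is a hypothetical stand-in for the subprocess call.
import time

def run_with_retries(action, attempts=3, pause=1):
    # call action() until it reports exit code 0, or give up after 'attempts' tries
    exit_code, result, count = 1, None, 0
    while exit_code != 0 and count < attempts:
        if count != 0:
            time.sleep(pause)
        exit_code, result = action()
        count += 1
    return exit_code, result, count

calls = {'n': 0}
def flaky_call():
    # fails on the first attempt, succeeds on the second
    calls['n'] += 1
    return (1, None) if calls['n'] < 2 else (0, "fileA.root\nfileB.root")

code, output, tries = run_with_retries(flaky_call, attempts=3, pause=0)
assert code == 0 and tries == 2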
174 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
175  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
176  for s in aProcess.paths_().keys():
177  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
178 
179 def anyOf(listOfKeys,dict,opt=None):
180  for k in listOfKeys:
181  if k in dict:
182  toReturn=dict[k]
183  dict.pop(k)
184  return toReturn
185  if opt!=None:
186  return opt
187  else:
188  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
189 
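# anyOf (above) pops the first matching key out of an --output dict, so each setting can be
# given under any of its aliases and is consumed exactly once; whatever is left over can be
# reported as unused. A small usage sketch with a hypothetical output specification:
out_spec = {'tier': 'RECO', 'e': 'RECOSIM', 'fn': 'custom.root'}

the_tier = anyOf(['t', 'tier', 'dataTier'], out_spec)                               # mandatory: raises if absent
the_content = anyOf(['e', 'ec', 'eventContent', 'streamType'], out_spec, the_tier)  # defaults to the tier
the_filter = anyOf(['f', 'ftN', 'filterName'], out_spec, '')                        # optional, default ''

assert (the_tier, the_content, the_filter) == ('RECO', 'RECOSIM', '')
assert out_spec == {'fn': 'custom.root'}   # only the unconsumed keys remain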
190 class ConfigBuilder(object):
191  """The main building routines """
192 
193  def __init__(self, options, process = None, with_output = False, with_input = False ):
194  """options taken from old cmsDriver and optparse """
195 
196  options.outfile_name = options.dirout+options.fileout
197 
198  self._options = options
199 
200  if self._options.isData and options.isMC:
201  raise Exception("ERROR: You may specify only --data or --mc, not both")
202  #if not self._options.conditions:
203  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
204 
205  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
206  if 'ENDJOB' in self._options.step:
207  if (hasattr(self._options,"outputDefinition") and \
208  self._options.outputDefinition != '' and \
209  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
210  (hasattr(self._options,"datatier") and \
211  self._options.datatier and \
212  'DQMIO' in self._options.datatier):
213  print "removing ENDJOB from steps since it is not compatible with the DQMIO dataTier"
214  self._options.step=self._options.step.replace(',ENDJOB','')
215 
216 
217 
218  # what steps are provided by this class?
219  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
220  self.stepMap={}
221  self.stepKeys=[]
222  for step in self._options.step.split(","):
223  if step=='': continue
224  stepParts = step.split(":")
225  stepName = stepParts[0]
226  if stepName not in stepList and not stepName.startswith('re'):
227  raise ValueError("Step "+stepName+" unknown")
228  if len(stepParts)==1:
229  self.stepMap[stepName]=""
230  elif len(stepParts)==2:
231  self.stepMap[stepName]=stepParts[1].split('+')
232  elif len(stepParts)==3:
233  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
234  else:
235  raise ValueError("Step definition "+step+" invalid")
236  self.stepKeys.append(stepName)
237 
238  #print "map of steps is:",self.stepMap
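# The loop above turns the comma-separated --step string into stepMap: a bare step maps to '',
# 'STEP:a+b' maps to the list ['a', 'b'], and 'STEP:cff:a+b' maps to the tuple (['a', 'b'], 'cff').
# A standalone sketch of that parsing; the step and sequence names are only illustrative.
def parse_steps(step_string):
    step_map = {}
    for step in step_string.split(','):
        if step == '':
            continue
        parts = step.split(':')
        name = parts[0]
        if len(parts) == 1:
            step_map[name] = ""
        elif len(parts) == 2:
            step_map[name] = parts[1].split('+')
        elif len(parts) == 3:
            step_map[name] = (parts[2].split('+'), parts[1])
        else:
            raise ValueError("Step definition " + step + " invalid")
    return step_map

assert parse_steps("RAW2DIGI,RECO:localreco+globalreco") == {
    'RAW2DIGI': "",
    'RECO': ['localreco', 'globalreco'],
}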
239 
240  self.with_output = with_output
241  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
242  self.with_output = False
243  self.with_input = with_input
244  if process == None:
245  self.process = cms.Process(self._options.name)
246  else:
247  self.process = process
248  self.imports = []
249  self.importsUnsch = []
250  self.define_Configs()
251  self.schedule = list()
252 
253  # we are doing two things here:
254  # - creating a process to catch errors
255  # - building the code to re-create the process
256 
257  self.additionalCommands = []
258  # TODO: maybe a list of to be dumped objects would help as well
259  self.blacklist_paths = []
260  self.addedObjects = []
261  self.additionalOutputs = {}
262 
263  self.productionFilterSequence = None
264  self.nextScheduleIsConditional=False
265  self.conditionalPaths=[]
266  self.excludedPaths=[]
267 
268  def profileOptions(self):
269  """
270  Parse the --profile option and return the IgProf profile service
271  settings (first event, event interval, output file formats), so that
272  profile dumps can be written in the middle of the run.
273  """
274  profileOpts = self._options.profile.split(':')
275  profilerStart = 1
276  profilerInterval = 100
277  profilerFormat = None
278  profilerJobFormat = None
279 
280  if len(profileOpts):
281  #type, given as first argument is unused here
282  profileOpts.pop(0)
283  if len(profileOpts):
284  startEvent = profileOpts.pop(0)
285  if not startEvent.isdigit():
286  raise Exception("%s is not a number" % startEvent)
287  profilerStart = int(startEvent)
288  if len(profileOpts):
289  eventInterval = profileOpts.pop(0)
290  if not eventInterval.isdigit():
291  raise Exception("%s is not a number" % eventInterval)
292  profilerInterval = int(eventInterval)
293  if len(profileOpts):
294  profilerFormat = profileOpts.pop(0)
295 
296 
297  if not profilerFormat:
298  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
299  self._options.step,
300  self._options.pileup,
301  self._options.conditions,
302  self._options.datatier,
303  self._options.profileTypeLabel)
304  if not profilerJobFormat and profilerFormat.endswith(".gz"):
305  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
306  elif not profilerJobFormat:
307  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
308 
309  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
310 
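# profileOptions above splits the --profile value on ':' as type:firstEvent:interval:format,
# falling back to 1 and 100 for the event settings when they are not given. A standalone
# sketch of that parsing; the option strings are hypothetical.
def parse_profile(spec, default_start=1, default_interval=100):
    opts = spec.split(':')
    start, interval, fmt = default_start, default_interval, None
    if opts:
        opts.pop(0)             # profiler type, unused here as in the code above
    if opts:
        start = int(opts.pop(0))
    if opts:
        interval = int(opts.pop(0))
    if opts:
        fmt = opts.pop(0)
    return start, interval, fmt

assert parse_profile("pp:101:50:myprof.gz") == (101, 50, "myprof.gz")
assert parse_profile("pp") == (1, 100, None)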
311  def load(self,includeFile):
312  includeFile = includeFile.replace('/','.')
313  self.process.load(includeFile)
314  return sys.modules[includeFile]
315 
316  def loadAndRemember(self, includeFile,unsch=0):
317  """helper routine to load and memorize imports"""
318  # we could make the imports an on-the-fly data method of the process instance itself
319  # not sure if the latter is a good idea
320  includeFile = includeFile.replace('/','.')
321  if unsch==0:
322  self.imports.append(includeFile)
323  self.process.load(includeFile)
324  return sys.modules[includeFile]
325  else:
326  self.importsUnsch.append(includeFile)
327  return 0#sys.modules[includeFile]
328 
329  def executeAndRemember(self, command):
330  """helper routine to remember replace statements"""
331  self.additionalCommands.append(command)
332  if not command.strip().startswith("#"):
333  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
334  import re
335  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
336  #exec(command.replace("process.","self.process."))
337 
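# executeAndRemember above records each command for the dumped configuration and also runs it
# against self.process; the re.sub call rewrites bare 'process' references into 'self.process'
# without touching identifiers that merely end in 'process'. A quick check of that substitution
# on a hypothetical command string:
import re

def rewrite_for_self(command):
    # same pattern as in executeAndRemember above
    return re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])", r"\1self.process\3", command)

assert rewrite_for_self("process.foo = process.bar") == "self.process.foo = self.process.bar"
assert rewrite_for_self("myprocess.foo = 1") == "myprocess.foo = 1"   # not a bare 'process': untouched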
338  def addCommon(self):
339  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
340  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
341  else:
342  self.process.options = cms.untracked.PSet( )
343 
344  if self._options.runUnscheduled:
345  self.process.options.allowUnscheduled=cms.untracked.bool(True)
346 
347  self.addedObjects.append(("","options"))
348 
349  if self._options.lazy_download:
350  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
351  stats = cms.untracked.bool(True),
352  enable = cms.untracked.bool(True),
353  cacheHint = cms.untracked.string("lazy-download"),
354  readHint = cms.untracked.string("read-ahead-buffered")
355  )
356  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
357 
358  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
359  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
360 
361  if self._options.profile:
362  (start, interval, eventFormat, jobFormat)=self.profileOptions()
363  self.process.IgProfService = cms.Service("IgProfService",
364  reportFirstEvent = cms.untracked.int32(start),
365  reportEventInterval = cms.untracked.int32(interval),
366  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
367  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
368  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
369 
370  def addMaxEvents(self):
371  """Here we decide how many evts will be processed"""
372  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
373  if self._options.number_out:
374  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
375  self.addedObjects.append(("","maxEvents"))
376 
377  def addSource(self):
378  """Here the source is built. Priority: file, generator"""
379  self.addedObjects.append(("Input source","source"))
380 
381  def filesFromOption(self):
382  for entry in self._options.filein.split(','):
383  print "entry",entry
384  if entry.startswith("filelist:"):
385  filesFromList(entry[9:],self.process.source)
386  elif entry.startswith("dbs:") or entry.startswith("das:"):
387  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
388  else:
389  self.process.source.fileNames.append(self._options.dirin+entry)
390  if self._options.secondfilein:
391  if not hasattr(self.process.source,"secondaryFileNames"):
392  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
393  for entry in self._options.secondfilein.split(','):
394  print "entry",entry
395  if entry.startswith("filelist:"):
396  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
397  elif entry.startswith("dbs:") or entry.startswith("das:"):
398  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
399  else:
400  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
401 
402  if self._options.filein or self._options.dasquery:
403  if self._options.filetype == "EDM":
404  self.process.source=cms.Source("PoolSource",
405  fileNames = cms.untracked.vstring(),
406  secondaryFileNames= cms.untracked.vstring())
407  filesFromOption(self)
408  elif self._options.filetype == "DAT":
409  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
410  filesFromOption(self)
411  elif self._options.filetype == "LHE":
412  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
413  if self._options.filein.startswith("lhe:"):
414  #list the article directory automatically
415  args=self._options.filein.split(':')
416  article=args[1]
417  print 'LHE input from article ',article
418  location='/store/lhe/'
419  import os
420  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
421  for line in textOfFiles:
422  for fileName in [x for x in line.split() if '.lhe' in x]:
423  self.process.source.fileNames.append(location+article+'/'+fileName)
424  if len(args)>2:
425  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
426  else:
427  filesFromOption(self)
428 
429 
430  elif self._options.filetype == "DQM":
431  self.process.source=cms.Source("DQMRootSource",
432  fileNames = cms.untracked.vstring())
433  filesFromOption(self)
434 
435  elif self._options.filetype == "DQMDAQ":
436  # FIXME: how to configure it if there are no input files specified?
437  self.process.source=cms.Source("DQMStreamerReader")
438 
439 
440  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
441  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
442 
443  if self._options.dasquery!='':
444  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
445  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
446 
447  ##drop LHEXMLStringProduct on input to save memory if appropriate
448  if 'GEN' in self.stepMap.keys():
449  if self._options.inputCommands:
450  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
451  else:
452  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
453 
454  if self.process.source and self._options.inputCommands:
455  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
456  for command in self._options.inputCommands.split(','):
457  # remove whitespace around the keep/drop statements
458  command = command.strip()
459  if command=='': continue
460  self.process.source.inputCommands.append(command)
461  if not self._options.dropDescendant:
462  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
463 
464  if self._options.lumiToProcess:
465  import FWCore.PythonUtilities.LumiList as LumiList
466  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
467 
468  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
469  if self.process.source is None:
470  self.process.source=cms.Source("EmptySource")
471 
472  # modify source in case of run-dependent MC
473  self.runsAndWeights=None
474  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
475  if not self._options.isMC :
476  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
477  if self._options.runsAndWeightsForMC:
478  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
479  else:
480  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
481  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
482  __import__(RunsAndWeights[self._options.runsScenarioForMC])
483  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
484  else:
485  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
486 
487  if self.runsAndWeights:
488  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
489  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
490  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
491  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
492 
493  return
494 
495  def addOutput(self):
496  """ Add output module to the process """
497  result=""
498  if self._options.outputDefinition:
499  if self._options.datatier:
500  print "--datatier & --eventcontent options ignored"
501 
502  #new output convention with a list of dict
503  outList = eval(self._options.outputDefinition)
504  for (id,outDefDict) in enumerate(outList):
505  outDefDictStr=outDefDict.__str__()
506  if not isinstance(outDefDict,dict):
507  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
508  #requires option: tier
509  theTier=anyOf(['t','tier','dataTier'],outDefDict)
510  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
511  ## event content
512  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
513  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
514  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
515  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
516  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
517  # module label has a particular role
518  if not theModuleLabel:
519  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
520  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
521  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
522  ]
523  for name in tryNames:
524  if not hasattr(self.process,name):
525  theModuleLabel=name
526  break
527  if not theModuleLabel:
528  raise Exception("cannot find a module label for specification: "+outDefDictStr)
529  if id==0:
530  defaultFileName=self._options.outfile_name
531  else:
532  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
533 
534  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
535  if not theFileName.endswith('.root'):
536  theFileName+='.root'
537 
538  if len(outDefDict.keys()):
539  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
540  if theStreamType=='DQMIO': theStreamType='DQM'
541  if theStreamType=='ALL':
542  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
543  else:
544  theEventContent = getattr(self.process, theStreamType+"EventContent")
545 
546 
547  addAlCaSelects=False
548  if theStreamType=='ALCARECO' and not theFilterName:
549  theFilterName='StreamALCACombined'
550  addAlCaSelects=True
551 
552  CppType='PoolOutputModule'
553  if self._options.timeoutOutput:
554  CppType='TimeoutPoolOutputModule'
555  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
556  output = cms.OutputModule(CppType,
557  theEventContent.clone(),
558  fileName = cms.untracked.string(theFileName),
559  dataset = cms.untracked.PSet(
560  dataTier = cms.untracked.string(theTier),
561  filterName = cms.untracked.string(theFilterName))
562  )
563  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
564  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
565  if not theSelectEvent and hasattr(self.process,'filtering_step'):
566  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
567  if theSelectEvent:
568  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
569 
570  if addAlCaSelects:
571  if not hasattr(output,'SelectEvents'):
572  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
573  for alca in self.AlCaPaths:
574  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
575 
576 
577  if hasattr(self.process,theModuleLabel):
578  raise Exception("the current process already has a module "+theModuleLabel+" defined")
579  #print "creating output module ",theModuleLabel
580  setattr(self.process,theModuleLabel,output)
581  outputModule=getattr(self.process,theModuleLabel)
582  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
583  path=getattr(self.process,theModuleLabel+'_step')
584  self.schedule.append(path)
585 
586  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
587  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
588  return label
589  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
590  if theExtraOutputCommands:
591  if not isinstance(theExtraOutputCommands,list):
592  raise Exception("extra output command in --output must be a list of strings")
593  if hasattr(self.process,theStreamType+"EventContent"):
594  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
595  else:
596  outputModule.outputCommands.extend(theExtraOutputCommands)
597 
598  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
599 
600  ##ends the --output options model
601  return result
602 
603  streamTypes=self._options.eventcontent.split(',')
604  tiers=self._options.datatier.split(',')
605  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
606  raise Exception("number of event content arguments does not match number of datatier arguments")
607 
608  # if the only step is alca we don't need to put in an output
609  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
610  return "\n"
611 
612  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
613  if streamType=='': continue
614  if streamType=='DQMIO': streamType='DQM'
615  theEventContent = getattr(self.process, streamType+"EventContent")
616  if i==0:
617  theFileName=self._options.outfile_name
618  theFilterName=self._options.filtername
619  else:
620  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
621  theFilterName=self._options.filtername
622  CppType='PoolOutputModule'
623  if self._options.timeoutOutput:
624  CppType='TimeoutPoolOutputModule'
625  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
626  output = cms.OutputModule(CppType,
627  theEventContent,
628  fileName = cms.untracked.string(theFileName),
629  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
630  filterName = cms.untracked.string(theFilterName)
631  )
632  )
633  if hasattr(self.process,"generation_step") and streamType!='LHE':
634  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
635  if hasattr(self.process,"filtering_step"):
636  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
637 
638  if streamType=='ALCARECO':
639  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
640 
641  if "MINIAOD" in streamType:
642  output.dropMetaData = cms.untracked.string('ALL')
643  output.fastCloning= cms.untracked.bool(False)
644  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
645 
646  outputModuleName=streamType+'output'
647  setattr(self.process,outputModuleName,output)
648  outputModule=getattr(self.process,outputModuleName)
649  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
650  path=getattr(self.process,outputModuleName+'_step')
651  self.schedule.append(path)
652 
653  if self._options.outputCommands and streamType!='DQM':
654  for evct in self._options.outputCommands.split(','):
655  if not evct: continue
656  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
657 
658  if not self._options.inlineEventContent:
659  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
660  return label
661  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
662 
663  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
664 
665  return result
666 
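# A sketch of the --output convention handled above: the option is a python list of dicts, each
# dict giving a data tier (mandatory) plus optional event content, file name, filter name, etc.,
# all consumed through anyOf so that leftover keys can be reported as unused. The specification
# string below is hypothetical.
output_definition = "[{'t':'RECO','e':'RECOSIM'},{'tier':'DQMIO','fn':'dqm.root'}]"

for out_def in eval(output_definition):
    tier = anyOf(['t', 'tier', 'dataTier'], out_def)
    content = anyOf(['e', 'ec', 'eventContent', 'streamType'], out_def, tier)
    file_name = anyOf(['fn', 'fileName'], out_def, 'output_in' + tier + '.root')
    if out_def:
        raise Exception("unused keys from --output options: " + ','.join(out_def.keys()))
    print("tier=%s content=%s file=%s" % (tier, content, file_name))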
667  def addStandardSequences(self):
668  """
669  Add selected standard sequences to the process
670  """
671  # load the pile up file
672  if self._options.pileup:
673  pileupSpec=self._options.pileup.split(',')[0]
674 
675  # Does the requested pile-up scenario exist?
676  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
677  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
678  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
679  raise Exception(message)
680 
681  # Put mixing parameters in a dictionary
682  if '.' in pileupSpec:
683  mixingDict={'file':pileupSpec}
684  elif pileupSpec.startswith('file:'):
685  mixingDict={'file':pileupSpec[5:]}
686  else:
687  import copy
688  mixingDict=copy.copy(Mixing[pileupSpec])
689  if len(self._options.pileup.split(','))>1:
690  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
691 
692  # Load the pu cfg file corresponding to the requested pu scenario
693  if 'file:' in pileupSpec:
694  #the file is local
695  self.process.load(mixingDict['file'])
696  print "inlining mixing module configuration"
697  self._options.inlineObjets+=',mix'
698  else:
699  self.loadAndRemember(mixingDict['file'])
700 
701  mixingDict.pop('file')
702  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
703  if self._options.pileup_input:
704  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
705  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
706  else:
707  mixingDict['F']=self._options.pileup_input.split(',')
708  specialization=defineMixing(mixingDict)
709  for command in specialization:
710  self.executeAndRemember(command)
711  if len(mixingDict)!=0:
712  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
713 
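# The --pileup value parsed above has the form 'scenario[,{python dict of overrides}]': the
# scenario picks an entry of Configuration.StandardSequences.Mixing (or a cff given as
# 'file:...'), and the optional trailing dict is eval'ed on top of it. A standalone sketch
# with a hypothetical one-entry mixing table:
import copy

MIXING = {'NoPileUp': {'file': 'Configuration/StandardSequences/MixingNoPileUp_cff'}}

def build_mixing_dict(pileup_option, mixing_table):
    spec = pileup_option.split(',')[0]
    if spec.startswith('file:'):
        mixing = {'file': spec[5:]}
    elif '.' in spec:
        mixing = {'file': spec}
    elif spec in mixing_table:
        mixing = copy.copy(mixing_table[spec])
    else:
        raise Exception(spec + ' is not a known mixing scenario')
    if len(pileup_option.split(',')) > 1:
        # the remainder is a python dict of parameter overrides, as in the code above
        mixing.update(eval(pileup_option[pileup_option.find(',') + 1:]))
    return mixing

result = build_mixing_dict("NoPileUp,{'N': 10}", MIXING)
assert result['N'] == 10 and 'file' in result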
714 
715  # load the geometry file
716  try:
717  if len(self.stepMap):
718  self.loadAndRemember(self.GeometryCFF)
719  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
720  self.loadAndRemember(self.SimGeometryCFF)
721  if self.geometryDBLabel:
722  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
723  except ImportError:
724  print "Geometry option",self._options.geometry,"unknown."
725  raise
726 
727  if len(self.stepMap):
728  self.loadAndRemember(self.magFieldCFF)
729 
730  for stepName in self.stepKeys:
731  stepSpec = self.stepMap[stepName]
732  print "Step:", stepName,"Spec:",stepSpec
733  if stepName.startswith('re'):
734  ##add the corresponding input content
735  if stepName[2:] not in self._options.donotDropOnInput:
736  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
737  stepName=stepName[2:]
738  if stepSpec=="":
739  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
740  elif type(stepSpec)==list:
741  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
742  elif type(stepSpec)==tuple:
743  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
744  else:
745  raise ValueError("Invalid step definition")
746 
747  if self._options.restoreRNDSeeds!=False:
748  #it is either True, or a process name
749  if self._options.restoreRNDSeeds==True:
750  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
751  else:
752  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
753  if self._options.inputEventContent or self._options.inputCommands:
754  if self._options.inputCommands:
755  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
756  else:
757  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
758 
759 
761  if self._options.inputEventContent:
762  import copy
763  def dropSecondDropStar(iec):
764  #drop all but the first occurrence of 'drop *' in the list
765  count=0
766  for item in iec:
767  if item=='drop *':
768  if count!=0:
769  iec.remove(item)
770  count+=1
771 
772 
773  ## allow comma separated input eventcontent
774  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
775  for evct in self._options.inputEventContent.split(','):
776  if evct=='': continue
777  theEventContent = getattr(self.process, evct+"EventContent")
778  if hasattr(theEventContent,'outputCommands'):
779  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
780  if hasattr(theEventContent,'inputCommands'):
781  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
782 
783  dropSecondDropStar(self.process.source.inputCommands)
784 
785  if not self._options.dropDescendant:
786  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
787 
788 
789  return
790 
791  def addConditions(self):
792  """Add conditions to the process"""
793  if not self._options.conditions: return
794 
795  if 'FrontierConditions_GlobalTag' in self._options.conditions:
796  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
797  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
798 
799  self.loadAndRemember(self.ConditionsDefaultCFF)
800  from Configuration.AlCa.GlobalTag import GlobalTag
801  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
802  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
803  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
804 
805  if self._options.slhc:
806  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
807 
808 
809  def addCustomise(self,unsch=0):
810  """Include the customise code """
811 
812  custOpt=[]
813  if unsch==0:
814  for c in self._options.customisation_file:
815  custOpt.extend(c.split(","))
816  else:
817  for c in self._options.customisation_file_unsch:
818  custOpt.extend(c.split(","))
819 
820  custMap=DictTypes.SortedKeysDict()
821  for opt in custOpt:
822  if opt=='': continue
823  if opt.count('.')>1:
824  raise Exception("more than one '.' in the specification: "+opt)
825  fileName=opt.split('.')[0]
826  if opt.count('.')==0: rest='customise'
827  else:
828  rest=opt.split('.')[1]
829  if rest=='py': rest='customise' #catch the case of --customise file.py
830 
831  if fileName in custMap:
832  custMap[fileName].extend(rest.split('+'))
833  else:
834  custMap[fileName]=rest.split('+')
835 
836  if len(custMap)==0:
837  final_snippet='\n'
838  else:
839  final_snippet='\n# customisation of the process.\n'
840 
841  allFcn=[]
842  for opt in custMap:
843  allFcn.extend(custMap[opt])
844  for fcn in allFcn:
845  if allFcn.count(fcn)!=1:
846  raise Exception("cannot specify "+fcn+" twice as a customisation method")
847 
848  for f in custMap:
849  # let python search for that package and do syntax checking at the same time
850  packageName = f.replace(".py","").replace("/",".")
851  __import__(packageName)
852  package = sys.modules[packageName]
853 
854  # now ask the package for its definition and pick .py instead of .pyc
855  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
856 
857  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
858  if self._options.inline_custom:
859  for line in file(customiseFile,'r'):
860  if "import FWCore.ParameterSet.Config" in line:
861  continue
862  final_snippet += line
863  else:
864  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
865  for fcn in custMap[f]:
866  print "customising the process with",fcn,"from",f
867  if not hasattr(package,fcn):
868  #bound to fail at run time
869  raise Exception("config "+f+" has no function "+fcn)
870  #execute the command
871  self.process=getattr(package,fcn)(self.process)
872  #and print it in the configuration
873  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
874  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
875 
876  if len(custMap)!=0:
877  final_snippet += '\n# End of customisation functions\n'
878 
879  ### now for a useful command
880  if unsch==1 or not self._options.runUnscheduled:
881  if self._options.customise_commands:
882  import string
883  final_snippet +='\n# Customisation from command line'
884  for com in self._options.customise_commands.split('\\n'):
885  com=string.lstrip(com)
886  self.executeAndRemember(com)
887  final_snippet +='\n'+com
888 
889  return final_snippet
890 
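# addCustomise above accepts entries of the form 'Package/SubPkg/module.func1+func2' (or
# 'module.py', which falls back to a function called 'customise') and groups the requested
# functions per file before importing and applying them. A parsing-only sketch; the
# specifications below are hypothetical.
def build_cust_map(specs):
    cust_map = {}
    for opt in specs:
        if opt == '':
            continue
        if opt.count('.') > 1:
            raise Exception("more than one '.' in the specification: " + opt)
        file_name = opt.split('.')[0]
        rest = 'customise' if opt.count('.') == 0 else opt.split('.')[1]
        if rest == 'py':
            rest = 'customise'   # catch the case of --customise file.py
        cust_map.setdefault(file_name, []).extend(rest.split('+'))
    return cust_map

specs = ['MyPkg/MyTools/customise_cff.tweakA+tweakB', 'other_cfg.py']
assert build_cust_map(specs) == {
    'MyPkg/MyTools/customise_cff': ['tweakA', 'tweakB'],
    'other_cfg': ['customise'],
}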
891  #----------------------------------------------------------------------------
892  # here the methods to define the python includes for each step or
893  # conditions
894  #----------------------------------------------------------------------------
895  def define_Configs(self):
896  if len(self.stepMap):
897  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
898  if self._options.particleTable not in defaultOptions.particleTableList:
899  print 'Invalid particle table provided. Options are:'
900  print defaultOptions.particleTableList
901  sys.exit(-1)
902  else:
903  if len(self.stepMap):
904  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
905 
906  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
907 
908  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
909  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
910  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
911  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
912  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
913  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
914  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
915  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
916  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
917  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
918  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
919  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
920  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
921  self.EIDefaultCFF=None
922  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
923  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
924  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
925  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
926  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
927  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
928  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
929  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
930  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
931  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
932  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
933 
934  if "DATAMIX" in self.stepMap.keys():
935  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
936  if self._options.datamix == 'PreMix':
937  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
938  else:
939  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
940  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
941  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
942 
943  if "DIGIPREMIX" in self.stepMap.keys():
944  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
945 
946  self.ALCADefaultSeq=None
947  self.LHEDefaultSeq='externalLHEProducer'
948  self.GENDefaultSeq='pgen'
949  self.SIMDefaultSeq='psim'
950  self.DIGIDefaultSeq='pdigi'
951  self.DIGIPREMIXDefaultSeq='pdigi'
952  self.DIGIPREMIX_S2DefaultSeq='pdigi'
953  self.DATAMIXDefaultSeq=None
954  self.DIGI2RAWDefaultSeq='DigiToRaw'
955  self.HLTDefaultSeq='GRun'
956  self.L1DefaultSeq=None
957  self.L1REPACKDefaultSeq='GT'
958  self.HARVESTINGDefaultSeq=None
959  self.ALCAHARVESTDefaultSeq=None
960  self.CFWRITERDefaultSeq=None
961  self.RAW2DIGIDefaultSeq='RawToDigi'
962  self.L1RecoDefaultSeq='L1Reco'
963  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
964  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
965  self.RECODefaultSeq='reconstruction'
966  else:
967  self.RECODefaultSeq='reconstruction_fromRECO'
968 
969  self.EIDefaultSeq='top'
970  self.POSTRECODefaultSeq=None
971  self.L1HwValDefaultSeq='L1HwVal'
972  self.DQMDefaultSeq='DQMOffline'
973  self.VALIDATIONDefaultSeq=''
974  self.ENDJOBDefaultSeq='endOfProcess'
975  self.REPACKDefaultSeq='DigiToRawRepack'
976  self.PATDefaultSeq='miniAOD'
977 
978  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
979 
980  if not self._options.beamspot:
981  self._options.beamspot=VtxSmearedDefaultKey
982 
983  # if it's MC then change the raw2digi
984  if self._options.isMC==True:
985  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
986  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
987  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
988  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
989  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
990  else:
991  self._options.beamspot = None
992 
993  #patch for gen, due to backward incompatibility
994  if 'reGEN' in self.stepMap:
995  self.GENDefaultSeq='fixGenInfo'
996 
997  if self._options.scenario=='cosmics':
998  self._options.pileup='Cosmics'
999  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1000  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1001  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1002  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1003  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1004  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1005  if self._options.isMC==True:
1006  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1007  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1008  self.RECODefaultSeq='reconstructionCosmics'
1009  self.DQMDefaultSeq='DQMOfflineCosmics'
1010 
1011  if self._options.scenario=='HeavyIons':
1012  if not self._options.beamspot:
1013  self._options.beamspot=VtxSmearedHIDefaultKey
1014  self.HLTDefaultSeq = 'HIon'
1015  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1016  self.VALIDATIONDefaultSeq=''
1017  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1018  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1019  self.RECODefaultSeq='reconstructionHeavyIons'
1020  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1021  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1022  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1023  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1024  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1025  if self._options.isMC==True:
1026  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1027 
1028 
1029  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1030 
1031  self.USERDefaultSeq='user'
1032  self.USERDefaultCFF=None
1033 
1034  # the magnetic field
1035  if self._options.isData:
1036  if self._options.magField==defaultOptions.magField:
1037  print "magnetic field option forced to: AutoFromDBCurrent"
1038  self._options.magField='AutoFromDBCurrent'
1039  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1040  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1041 
1042  # the geometry
1043  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1044  self.geometryDBLabel=None
1045  simGeometry=''
1046  if self._options.fast:
1047  if 'start' in self._options.conditions.lower():
1048  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1049  else:
1050  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1051  else:
1052  def inGeometryKeys(opt):
1053  from Configuration.StandardSequences.GeometryConf import GeometryConf
1054  if opt in GeometryConf:
1055  return GeometryConf[opt]
1056  else:
1057  return opt
1058 
1059  geoms=self._options.geometry.split(',')
1060  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1061  if len(geoms)==2:
1062  #may specify the reco geometry
1063  if '/' in geoms[1] or '_cff' in geoms[1]:
1064  self.GeometryCFF=geoms[1]
1065  else:
1066  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1067 
1068  if (geoms[0].startswith('DB:')):
1069  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1070  self.geometryDBLabel=geoms[0][3:]
1071  print "with DB:"
1072  else:
1073  if '/' in geoms[0] or '_cff' in geoms[0]:
1074  self.SimGeometryCFF=geoms[0]
1075  else:
1076  simGeometry=geoms[0]
1077  if self._options.gflash==True:
1078  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1079  else:
1080  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1081 
1082  # synchronize the geometry configuration and the FullSimulation sequence to be used
1083  if simGeometry not in defaultOptions.geometryExtendedOptions:
1084  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1085 
1086  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1087  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1088  self._options.beamspot='NoSmear'
1089 
1090  # fastsim requires some changes to the default cff files and sequences
1091  if self._options.fast:
1092  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1093  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1094  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1095  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1096  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1097  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1098 
1099  # Mixing
1100  if self._options.pileup=='default':
1101  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1102  self._options.pileup=MixingDefaultKey
1103 
1104 
1105  #not driven by a default cff anymore
1106  if self._options.isData:
1107  self._options.pileup=None
1108 
1109  if self._options.slhc:
1110  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1111  if 'stdgeom' not in self._options.slhc:
1112  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1113  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1114  if self._options.pileup!=defaultOptions.pileup:
1115  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1116 
1117  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1118 
1119  # for alca, skims, etc
1120  def addExtraStream(self,name,stream,workflow='full'):
1121  # define output module and go from there
1122  output = cms.OutputModule("PoolOutputModule")
1123  if stream.selectEvents.parameters_().__len__()!=0:
1124  output.SelectEvents = stream.selectEvents
1125  else:
1126  output.SelectEvents = cms.untracked.PSet()
1127  output.SelectEvents.SelectEvents=cms.vstring()
1128  if isinstance(stream.paths,tuple):
1129  for path in stream.paths:
1130  output.SelectEvents.SelectEvents.append(path.label())
1131  else:
1132  output.SelectEvents.SelectEvents.append(stream.paths.label())
1133 
1134 
1135 
1136  if isinstance(stream.content,str):
1137  evtPset=getattr(self.process,stream.content)
1138  for p in evtPset.parameters_():
1139  setattr(output,p,getattr(evtPset,p))
1140  if not self._options.inlineEventContent:
1141  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1142  return label
1143  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1144  else:
1145  output.outputCommands = stream.content
1146 
1147 
1148  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1149 
1150  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1151  filterName = cms.untracked.string(stream.name))
1152 
1153  if self._options.filtername:
1154  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1155 
1156  #add an automatic flushing to limit memory consumption
1157  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1158 
1159  if workflow in ("producers","full"):
1160  if isinstance(stream.paths,tuple):
1161  for path in stream.paths:
1162  self.schedule.append(path)
1163  else:
1164  self.schedule.append(stream.paths)
1165 
1166 
1167  # in case of relvals we don't want to have additional outputs
1168  if (not self._options.relval) and workflow in ("full","output"):
1169  self.additionalOutputs[name] = output
1170  setattr(self.process,name,output)
1171 
1172  if workflow == 'output':
1173  # adjust the select events to the proper trigger results from previous process
1174  filterList = output.SelectEvents.SelectEvents
1175  for i, filter in enumerate(filterList):
1176  filterList[i] = filter+":"+self._options.triggerResultsProcess
1177 
1178  return output
1179 
1180  #----------------------------------------------------------------------------
1181  # here the methods to create the steps. Of course we are doing magic here ;)
1182  # prepare_STEPNAME modifies self.process and whatever else is needed.
1183  #----------------------------------------------------------------------------
1184 
1185  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1186  if ( len(sequence.split('.'))==1 ):
1187  l=self.loadAndRemember(defaultCFF,unsch)
1188  elif ( len(sequence.split('.'))==2 ):
1189  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1190  sequence=sequence.split('.')[1]
1191  else:
1192  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1193  print sequence,"not recognized"
1194  raise
1195  return l
1196 
1197  def scheduleSequence(self,seq,prefix,what='Path'):
1198  if '*' in seq:
1199  #create only one path with all sequences in it
1200  for i,s in enumerate(seq.split('*')):
1201  if i==0:
1202  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1203  else:
1204  p=getattr(self.process,prefix)
1205  p+=getattr(self.process, s)
1206  self.schedule.append(getattr(self.process,prefix))
1207  return
1208  else:
1209  #create as many paths as there are sequences
1210  if not '+' in seq:
1211  if self.nextScheduleIsConditional:
1212  self.conditionalPaths.append(prefix)
1213  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1214  self.schedule.append(getattr(self.process,prefix))
1215  else:
1216  for i,s in enumerate(seq.split('+')):
1217  sn=prefix+'%d'%(i)
1218  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1219  self.schedule.append(getattr(self.process,sn))
1220  return
1221 
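# scheduleSequence above supports two separators: 'a*b' folds all sequences into a single Path,
# while 'a+b' creates one Path per sequence, named prefix0, prefix1, ... A pure-python sketch of
# the resulting path layout (no cms objects involved; the sequence names are illustrative).
def plan_paths(seq, prefix):
    # returns a list of (path_name, [sequence names in that path]) pairs
    if '*' in seq:
        return [(prefix, seq.split('*'))]
    if '+' not in seq:
        return [(prefix, [seq])]
    return [(prefix + '%d' % i, [s]) for i, s in enumerate(seq.split('+'))]

assert plan_paths('siPixelDigis*siStripDigis', 'raw2digi_step') == \
    [('raw2digi_step', ['siPixelDigis', 'siStripDigis'])]
assert plan_paths('dqmA+dqmB', 'dqm_step') == \
    [('dqm_step0', ['dqmA']), ('dqm_step1', ['dqmB'])]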
1222  def scheduleSequenceAtEnd(self,seq,prefix):
1223  self.scheduleSequence(seq,prefix,what='EndPath')
1224  return
1225 
1226  def prepare_ALCAPRODUCER(self, sequence = None):
1227  self.prepare_ALCA(sequence, workflow = "producers")
1228 
1229  def prepare_ALCAOUTPUT(self, sequence = None):
1230  self.prepare_ALCA(sequence, workflow = "output")
1231 
1232  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1233  """ Enrich the process with alca streams """
1234  print 'DL enriching',workflow,sequence
1235  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1236  sequence = sequence.split('.')[-1]
1237 
1238  # decide which ALCA paths to use
1239  alcaList = sequence.split("+")
1240  maxLevel=0
1241  from Configuration.AlCa.autoAlca import autoAlca
1242  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1243  self.expandMapping(alcaList,autoAlca)
1244  self.AlCaPaths=[]
1245  for name in alcaConfig.__dict__:
1246  alcastream = getattr(alcaConfig,name)
1247  shortName = name.replace('ALCARECOStream','')
1248  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1249  output = self.addExtraStream(name,alcastream, workflow = workflow)
1250  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1251  self.AlCaPaths.append(shortName)
1252  if 'DQM' in alcaList:
1253  if not self._options.inlineEventContent and hasattr(self.process,name):
1254  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1255  else:
1256  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1257 
1258  #rename the HLT process name in the alca modules
1259  if self._options.hltProcess or 'HLT' in self.stepMap:
1260  if isinstance(alcastream.paths,tuple):
1261  for path in alcastream.paths:
1262  self.renameHLTprocessInSequence(path.label())
1263  else:
1264  self.renameHLTprocessInSequence(alcastream.paths.label())
1265 
1266  for i in range(alcaList.count(shortName)):
1267  alcaList.remove(shortName)
1268 
1269  # DQM needs a special handling
1270  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1271  path = getattr(alcaConfig,name)
1272  self.schedule.append(path)
1273  alcaList.remove('DQM')
1274 
1275  if isinstance(alcastream,cms.Path):
1276  #blacklist the alca paths so that they do not appear in the cfg
1277  self.blacklist_paths.append(alcastream)
1278 
1279 
1280  if len(alcaList) != 0:
1281  available=[]
1282  for name in alcaConfig.__dict__:
1283  alcastream = getattr(alcaConfig,name)
1284  if isinstance(alcastream,cms.FilteredStream):
1285  available.append(name.replace('ALCARECOStream',''))
1286  print "The following alcas could not be found "+str(alcaList)
1287  print "available ",available
1288  #print "verify your configuration, ignoring for now"
1289  raise Exception("The following alcas could not be found "+str(alcaList))
1290 
1291  def prepare_LHE(self, sequence = None):
1292  #load the fragment
1293  ##make it loadable
1294  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1295  print "Loading lhe fragment from",loadFragment
1296  __import__(loadFragment)
1297  self.process.load(loadFragment)
1298  ##inline the modules
1299  self._options.inlineObjets+=','+sequence
1300 
1301  getattr(self.process,sequence).nEvents = int(self._options.number)
1302 
1303  #schedule it
1304  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1305  self.excludedPaths.append("lhe_step")
1306  self.schedule.append( self.process.lhe_step )
1307 
1308  def prepare_GEN(self, sequence = None):
1309  """ load the fragment of generator configuration """
1310  loadFailure=False
1311  #remove trailing .py
1312  #support old style .cfi by changing something.cfi into something_cfi
1313  #remove python/ from the name
1314  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1315  #standard location of fragments
1316  if not '/' in loadFragment:
1317  loadFragment='Configuration.Generator.'+loadFragment
1318  else:
1319  loadFragment=loadFragment.replace('/','.')
1320  try:
1321  print "Loading generator fragment from",loadFragment
1322  __import__(loadFragment)
1323  except:
1324  loadFailure=True
1325  #if self.process.source and self.process.source.type_()=='EmptySource':
1326  if not (self._options.filein or self._options.dasquery):
1327  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1328 
1329  if not loadFailure:
1330  generatorModule=sys.modules[loadFragment]
1331  genModules=generatorModule.__dict__
1332  #remove lhe producer module since this should have been
1333  #imported instead in the LHE step
1334  if self.LHEDefaultSeq in genModules:
1335  del genModules[self.LHEDefaultSeq]
1336 
1337  if self._options.hideGen:
1338  self.loadAndRemember(loadFragment)
1339  else:
1340  self.process.load(loadFragment)
1341  # expose the objects from that fragment to the configuration
1342  import FWCore.ParameterSet.Modules as cmstypes
1343  for name in genModules:
1344  theObject = getattr(generatorModule,name)
1345  if isinstance(theObject, cmstypes._Module):
1346  self._options.inlineObjets=name+','+self._options.inlineObjets
1347  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1348  self._options.inlineObjets+=','+name
1349 
1350  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1351  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1352  self.productionFilterSequence = 'ProductionFilterSequence'
1353  elif 'generator' in genModules:
1354  self.productionFilterSequence = 'generator'
1355 
1356  """ Enrich the schedule with the rest of the generation step """
1357  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1358  genSeqName=sequence.split('.')[-1]
1359 
1360  if True:
1361  try:
1362  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1363  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1364  self.loadAndRemember(cffToBeLoaded)
1365  except ImportError:
1366  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1367 
1368  if self._options.scenario == 'HeavyIons':
1369  if self._options.pileup=='HiMixGEN':
1370  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1371  else:
1372  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1373 
1374  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1375  self.schedule.append(self.process.generation_step)
1376 
1377  #register to the genstepfilter the name of the path (static right now, but might evolve)
1378  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1379 
1380  if 'reGEN' in self.stepMap:
1381  #stop here
1382  return
1383 
1384  """ Enrich the schedule with the summary of the filter step """
1385  #the gen filter in the endpath
1386  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1387  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1388  return
1389 
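# Examples of the fragment resolution in prepare_GEN (fragment names are illustrative):
#   --evt_type SingleMuPt10_cfi            ->  Configuration.Generator.SingleMuPt10_cfi
#   --evt_type MyPkg/MySub/python/frag.py  ->  MyPkg.MySub.frag
# If the fragment defines both 'generator' and 'ProductionFilterSequence', the latter is
# remembered as productionFilterSequence and later prepended to all conditional paths
# (see the "production filter sequence 2/2" block in prepare()).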
1390  def prepare_SIM(self, sequence = None):
1391  """ Enrich the schedule with the simulation step"""
1392  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1393  if not self._options.fast:
1394  if self._options.gflash==True:
1395  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1396 
1397  if self._options.magField=='0T':
1398  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1399  else:
1400  if self._options.magField=='0T':
1401  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1402 
1403  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1404  return
1405 
1406  def prepare_DIGI(self, sequence = None):
1407  """ Enrich the schedule with the digitisation step"""
1408  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1409 
1410  if self._options.gflash==True:
1411  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1412 
1413  if sequence == 'pdigi_valid':
1414  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1415 
1416  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1417  if self._options.inputEventContent=='':
1418  self._options.inputEventContent='REGEN'
1419  else:
1420  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1421 
1422 
1423  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1424  return
1425 
1426  def prepare_DIGIPREMIX(self, sequence = None):
1427  """ Enrich the schedule with the digitisation step"""
1428  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1429 
1430  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1431 
1432  if sequence == 'pdigi_valid':
1433  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1434  else:
1435  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1436 
1437  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1438  return
1439 
1440  def prepare_DIGIPREMIX_S2(self, sequence = None):
1441  """ Enrich the schedule with the digitisation step"""
1442  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1443 
1444  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1445 
1446 
1447  if sequence == 'pdigi_valid':
1448  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1449  else:
1450  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1451 
1452  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1453  return
1454 
1455  def prepare_CFWRITER(self, sequence = None):
1456  """ Enrich the schedule with the crossing frame writer step"""
1457  self.loadAndRemember(self.CFWRITERDefaultCFF)
1458  self.scheduleSequence('pcfw','cfwriter_step')
1459  return
1460 
1461  def prepare_DATAMIX(self, sequence = None):
1462  """ Enrich the schedule with the digitisation step"""
1463  self.loadAndRemember(self.DATAMIXDefaultCFF)
1464  self.scheduleSequence('pdatamix','datamixing_step')
1465 
1466  if self._options.pileup_input:
1467  theFiles=''
1468  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1469  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1470  elif self._options.pileup_input.startswith("filelist:"):
1471  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1472  else:
1473  theFiles=self._options.pileup_input.split(',')
1474  #print theFiles
1475  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1476 
1477  return
1478 
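# The three accepted --pileup_input forms handled above, with illustrative values:
#   das:/RelValMinBias/CMSSW_X_Y_Z/GEN-SIM   -> resolved via filesFromDASQuery()
#   filelist:my_pileup_files.txt             -> read with filesFromList()
#   file:pu_1.root,file:pu_2.root            -> used verbatim as a comma-separated list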
1479  def prepare_DIGI2RAW(self, sequence = None):
1480  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1481  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1482  if "DIGIPREMIX" in self.stepMap.keys():
1483  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1484  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1485 
1486  return
1487 
1488  def prepare_REPACK(self, sequence = None):
1489  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1490  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1491  return
1492 
1493  def prepare_L1(self, sequence = None):
1494  """ Enrich the schedule with the L1 simulation step"""
1495  assert(sequence == None)
1496  self.loadAndRemember(self.L1EMDefaultCFF)
1497  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1498  return
1499 
1500  def prepare_L1REPACK(self, sequence = None):
1501  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1502  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1503  if sequence in supported:
1504  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1505  if self._options.scenario == 'HeavyIons':
1506  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1507  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1508  else:
1509  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1510  raise Exception('unsupported feature')
1511 
1512 
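# For example (key chosen for illustration), "-s ...,L1REPACK:Full,..." loads
# Configuration/StandardSequences/SimL1EmulatorRepack_Full_cff and schedules
# SimL1Emulator as L1RePack_step; any key outside 'supported' raises immediately.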
1513  def prepare_HLT(self, sequence = None):
1514  """ Enrich the schedule with the HLT simulation step"""
1515  if not sequence:
1516  print "no specification of the hlt menu has been given, should never happen"
1517  raise Exception('no HLT sequence provided')
1518 
1519  if '@' in sequence:
1520  # case where HLT:@something was provided
1521  from Configuration.HLT.autoHLT import autoHLT
1522  key = sequence[1:]
1523  if key in autoHLT:
1524  sequence = autoHLT[key]
1525  else:
1526  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1527 
1528  if ',' in sequence:
1529  #case where HLT:something:something was provided
1530  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1531  optionsForHLT = {}
1532  if self._options.scenario == 'HeavyIons':
1533  optionsForHLT['type'] = 'HIon'
1534  else:
1535  optionsForHLT['type'] = 'GRun'
1536  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1537  if sequence == 'run,fromSource':
1538  if hasattr(self.process.source,'firstRun'):
1539  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1540  elif hasattr(self.process.source,'setRunNumber'):
1541  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1542  else:
1543  raise Exception('Cannot replace menu to load %s'%(sequence))
1544  else:
1545  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1546  else:
1547  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1548 
1549  if self._options.isMC:
1550  if self._options.fast:
1551  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1552  else:
1553  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1554 
1555  if self._options.name != 'HLT':
1556  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1557  self.additionalCommands.append('process = ProcessName(process)')
1558  self.additionalCommands.append('')
1559  from HLTrigger.Configuration.CustomConfigs import ProcessName
1560  self.process = ProcessName(self.process)
1561 
1562  self.schedule.append(self.process.HLTSchedule)
1563  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1564 
1565  #this is a fake, to be removed with fastsim migration and HLT menu dump
1566  if self._options.fast:
1567  if not hasattr(self.process,'HLTEndSequence'):
1568  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1569 
1570 
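# The three ways an HLT menu can be specified above, with illustrative keys:
#   HLT:@relval2016     -> looked up in Configuration.HLT.autoHLT (the key must exist)
#   HLT:GRun            -> loads HLTrigger/Configuration/HLT_GRun_cff directly
#   HLT:run,fromSource  -> reloads the menu matching the run number found in the source
# For MC the matching customizeHLTforMC customisation is appended automatically, and the
# whole HLTSchedule is blacklisted from the path dump since it is loaded from the cff.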
1571  def prepare_RAW2RECO(self, sequence = None):
1572  if ',' in sequence:
1573  seqReco=sequence.split(',')[1]
1574  seqDigi=sequence.split(',')[0]
1575  else:
1576  print "RAW2RECO requires two specifications",sequence,"insufficient"
1577 
1578  self.prepare_RAW2DIGI(seqDigi)
1579  self.prepare_RECO(seqReco)
1580  return
1581 
1582  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1583  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1584  self.scheduleSequence(sequence,'raw2digi_step')
1585  # if self._options.isRepacked:
1586  #self.renameInputTagsInSequence(sequence)
1587  return
1588 
1589  def prepare_PATFILTER(self, sequence=None):
1590  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1591  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1592  for filt in allMetFilterPaths:
1593  self.schedule.append(getattr(self.process,'Flag_'+filt))
1594 
1595  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1596  ''' Enrich the schedule with L1 HW validation '''
1597  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1598  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1599  print '\n\n\n DEPRECATED this has no action \n\n\n'
1600  return
1601 
1602  def prepare_L1Reco(self, sequence = "L1Reco"):
1603  ''' Enrich the schedule with L1 reconstruction '''
1604  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1605  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1606  return
1607 
1608  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1609  ''' Enrich the schedule with the L1 track trigger '''
1610  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1611  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1612  return
1613 
1614  def prepare_FILTER(self, sequence = None):
1615  ''' Enrich the schedule with a user defined filter sequence '''
1616  ## load the relevant part
1617  filterConfig=self.load(sequence.split('.')[0])
1618  filterSeq=sequence.split('.')[-1]
1619  ## print it in the configuration
1620  class PrintAllModules(object):
1621  def __init__(self):
1622  self.inliner=''
1623  pass
1624  def enter(self,visitee):
1625  try:
1626  label=visitee.label()
1627  ##needs to be in reverse order
1628  self.inliner=label+','+self.inliner
1629  except:
1630  pass
1631  def leave(self,v): pass
1632 
1633  expander=PrintAllModules()
1634  getattr(self.process,filterSeq).visit( expander )
1635  self._options.inlineObjets+=','+expander.inliner
1636  self._options.inlineObjets+=','+filterSeq
1637 
1638  ## put the filtering path in the schedule
1639  self.scheduleSequence(filterSeq,'filtering_step')
1640  self.nextScheduleIsConditional=True
1641  ## put it before all the other paths
1642  self.productionFilterSequence = filterSeq
1643 
1644  return
1645 
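# PrintAllModules above is a minimal cms.Sequence visitor: enter() is called for every
# node of the visited sequence, labelled nodes are collected (in reverse order) into the
# 'inliner' string, and the result is appended to _options.inlineObjets so that every
# module of the user filter ends up inlined in the dumped configuration.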
1646  def prepare_RECO(self, sequence = "reconstruction"):
1647  ''' Enrich the schedule with reconstruction '''
1648  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1649  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1650  return
1651 
1652  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1653  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1654  if not self._options.fast:
1655  print "ERROR: this step is only implemented for FastSim"
1656  sys.exit()
1657  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1658  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1659  return
1660 
1661  def prepare_PAT(self, sequence = "miniAOD"):
1662  ''' Enrich the schedule with PAT '''
1663  self.prepare_PATFILTER(self)
1664  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1665  if not self._options.runUnscheduled:
1666  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1667  if self._options.isData:
1668  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1669  else:
1670  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1671  if self._options.fast:
1672  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1673  return
1674 
1675  def prepare_EI(self, sequence = None):
1676  ''' Enrich the schedule with event interpretation '''
1677  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1678  if sequence in EventInterpretation:
1679  self.EIDefaultCFF = EventInterpretation[sequence]
1680  sequence = 'EIsequence'
1681  else:
1682  raise Exception('Cannot set %s event interpretation'%( sequence) )
1683  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1684  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1685  return
1686 
1687  def prepare_SKIM(self, sequence = "all"):
1688  ''' Enrich the schedule with skimming fragments'''
1689  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1690  sequence = sequence.split('.')[-1]
1691 
1692  skimlist=sequence.split('+')
1693  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1694  from Configuration.Skimming.autoSkim import autoSkim
1695  self.expandMapping(skimlist,autoSkim)
1696 
1697  #print "dictionnary for skims:",skimConfig.__dict__
1698  for skim in skimConfig.__dict__:
1699  skimstream = getattr(skimConfig,skim)
1700  if isinstance(skimstream,cms.Path):
1701  #blacklist the skim path so that it does not appear in the cfg
1702  self.blacklist_paths.append(skimstream)
1703  if (not isinstance(skimstream,cms.FilteredStream)):
1704  continue
1705  shortname = skim.replace('SKIMStream','')
1706  if (sequence=="all"):
1707  self.addExtraStream(skim,skimstream)
1708  elif (shortname in skimlist):
1709  self.addExtraStream(skim,skimstream)
1710  #add a DQM eventcontent for this guy
1711  if self._options.datatier=='DQM':
1712  self.process.load(self.EVTCONTDefaultCFF)
1713  skimstreamDQM = cms.FilteredStream(
1714  responsible = skimstream.responsible,
1715  name = skimstream.name+'DQM',
1716  paths = skimstream.paths,
1717  selectEvents = skimstream.selectEvents,
1718  content = self._options.datatier+'EventContent',
1719  dataTier = cms.untracked.string(self._options.datatier)
1720  )
1721  self.addExtraStream(skim+'DQM',skimstreamDQM)
1722  for i in range(skimlist.count(shortname)):
1723  skimlist.remove(shortname)
1724 
1725 
1726 
1727  if (len(skimlist)!=0 and sequence!="all"):
1728  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1729  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1730 
1731  def prepare_USER(self, sequence = None):
1732  ''' Enrich the schedule with a user defined sequence '''
1733  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1734  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1735  return
1736 
1737  def prepare_POSTRECO(self, sequence = None):
1738  """ Enrich the schedule with the postreco step """
1739  self.loadAndRemember(self.POSTRECODefaultCFF)
1740  self.scheduleSequence('postreco_generator','postreco_step')
1741  return
1742 
1743 
1744  def prepare_VALIDATION(self, sequence = 'validation'):
1745  print sequence,"in preparing validation"
1746  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1747  from Validation.Configuration.autoValidation import autoValidation
1748  #in case VALIDATION:something:somethingelse -> something,somethingelse
1749  sequence=sequence.split('.')[-1]
1750  if sequence.find(',')!=-1:
1751  prevalSeqName=sequence.split(',')[0].split('+')
1752  valSeqName=sequence.split(',')[1].split('+')
1753  self.expandMapping(prevalSeqName,autoValidation,index=0)
1754  self.expandMapping(valSeqName,autoValidation,index=1)
1755  else:
1756  if '@' in sequence:
1757  prevalSeqName=sequence.split('+')
1758  valSeqName=sequence.split('+')
1759  self.expandMapping(prevalSeqName,autoValidation,index=0)
1760  self.expandMapping(valSeqName,autoValidation,index=1)
1761  else:
1762  postfix=''
1763  if sequence:
1764  postfix='_'+sequence
1765  prevalSeqName=['prevalidation'+postfix]
1766  valSeqName=['validation'+postfix]
1767  if not hasattr(self.process,valSeqName[0]):
1768  prevalSeqName=['']
1769  valSeqName=[sequence]
1770 
1771  def NFI(index):
1772  ##name from index, required to keep backward compatibility
1773  if index==0:
1774  return ''
1775  else:
1776  return '%s'%index
1777 
1778 
1779  #rename the HLT process in validation steps
1780  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1781  for s in valSeqName+prevalSeqName:
1782  if s:
1783  self.renameHLTprocessInSequence(s)
1784  for (i,s) in enumerate(prevalSeqName):
1785  if s:
1786  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1787  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1788 
1789  for (i,s) in enumerate(valSeqName):
1790  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1791  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1792 
1793  #needed in case the miniAODValidation sequence is run starting from AODSIM
1794  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1795  return
1796 
1797  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1798  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1799  self._options.restoreRNDSeeds=True
1800 
1801  if not 'DIGI' in self.stepMap and not self._options.fast:
1802  self.executeAndRemember("process.mix.playback = True")
1803  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1804  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1805  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1806 
1807  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1808  #will get in the schedule, smoothly
1809  for (i,s) in enumerate(valSeqName):
1810  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1811 
1812  return
1813 
1814 
1816  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1817  It will climb down within PSets, VPSets and VInputTags to find its target"""
1818  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1819  self._paramReplace = paramReplace
1820  self._paramSearch = paramSearch
1821  self._verbose = verbose
1822  self._whitelist = whitelist
1823 
1824  def doIt(self,pset,base):
1825  if isinstance(pset, cms._Parameterizable):
1826  for name in pset.parameters_().keys():
1827  # skip whitelisted parameters
1828  if name in self._whitelist:
1829  continue
1830  # if I use pset.parameters_().items() I get copies of the parameter values
1831  # so I can't modify the nested pset
1832  value = getattr(pset,name)
1833  type = value.pythonTypeName()
1834  if type in ('cms.PSet', 'cms.untracked.PSet'):
1835  self.doIt(value,base+"."+name)
1836  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1837  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1838  elif type in ('cms.string', 'cms.untracked.string'):
1839  if value.value() == self._paramSearch:
1840  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1841  setattr(pset, name,self._paramReplace)
1842  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1843  for (i,n) in enumerate(value):
1844  if not isinstance(n, cms.InputTag):
1845  n=cms.InputTag(n)
1846  if n.processName == self._paramSearch:
1847  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1848  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1849  setattr(n,"processName",self._paramReplace)
1850  value[i]=n
1851  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1852  for (i,n) in enumerate(value):
1853  if n==self._paramSearch:
1854  getattr(pset,name)[i]=self._paramReplace
1855  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1856  if value.processName == self._paramSearch:
1857  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1858  setattr(getattr(pset, name),"processName",self._paramReplace)
1859 
1860  def enter(self,visitee):
1861  label = ''
1862  try:
1863  label = visitee.label()
1864  except AttributeError:
1865  label = '<Module not in a Process>'
1866  except:
1867  label = 'other exception'
1868  self.doIt(visitee, label)
1869 
1870  def leave(self,visitee):
1871  pass
1872 
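# MassSearchReplaceProcessNameVisitor.doIt() recurses through every parameter of a
# module: (V)PSets are descended into, while InputTag/VInputTag parameters whose
# processName equals paramSearch (and string/vstring parameters equal to it) are
# rewritten to paramReplace. For illustration, with paramSearch='HLT' and
# paramReplace='reHLT', cms.InputTag("TriggerResults","","HLT") becomes
# cms.InputTag("TriggerResults","","reHLT").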
1873  #visit a sequence to replace all input tags
1874  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1875  print "Replacing all InputTag %s => %s"%(oldT,newT)
1876  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1877  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1878  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1879  if not loadMe in self.additionalCommands:
1880  self.additionalCommands.append(loadMe)
1881  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1882 
1883  #change the process name used to address HLT results in any sequence
1884  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1885  if self._options.hltProcess:
1886  proc=self._options.hltProcess
1887  else:
1888  proc=self.process.name_()
1889  if proc==HLTprocess: return
1890  # look up all modules in the sequence
1891  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1892  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1893  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1894  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1895  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1896 
1897 
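# Typical use of renameHLTprocessInSequence: when the HLT step is re-run in the same job
# (or --hltProcess is given), downstream DQM/validation/ALCA sequences that consume HLT
# products are visited once here, and the same replacement is replayed in the dumped cfg
# through the commands appended to additionalCommands.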
1898  def expandMapping(self,seqList,mapping,index=None):
1899  maxLevel=20
1900  level=0
1901  while '@' in repr(seqList) and level<maxLevel:
1902  level+=1
1903  for specifiedCommand in seqList:
1904  if specifiedCommand.startswith('@'):
1905  location=specifiedCommand[1:]
1906  if not location in mapping:
1907  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1908  mappedTo=mapping[location]
1909  if index!=None:
1910  mappedTo=mappedTo[index]
1911  seqList.remove(specifiedCommand)
1912  seqList.extend(mappedTo.split('+'))
1913  break
1914  if level==maxLevel:
1915  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1916 
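# Worked example of expandMapping with a hypothetical mapping:
#   mapping = {'common': 'DQMOfflineCommon', 'muon': 'DQMOfflineMuon+DQMOfflineTracking'}
#   seqList = ['@common', '@muon']
#   expandMapping(seqList, mapping)
#   # seqList is now ['DQMOfflineCommon', 'DQMOfflineMuon', 'DQMOfflineTracking']
# '@' entries may themselves expand to further '@' entries, up to maxLevel=20 rounds.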
1917  def prepare_DQM(self, sequence = 'DQMOffline'):
1918  # this one needs replacement
1919 
1920  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1921  sequenceList=sequence.split('.')[-1].split('+')
1922  postSequenceList=sequence.split('.')[-1].split('+')
1923  from DQMOffline.Configuration.autoDQM import autoDQM
1924  self.expandMapping(sequenceList,autoDQM,index=0)
1925  self.expandMapping(postSequenceList,autoDQM,index=1)
1926 
1927  if len(set(sequenceList))!=len(sequenceList):
1928  sequenceList=list(set(sequenceList))
1929  print "Duplicate entries for DQM:, using",sequenceList
1930 
1931  pathName='dqmoffline_step'
1932  for (i,sequence) in enumerate(sequenceList):
1933  if (i!=0):
1934  pathName='dqmoffline_%d_step'%(i)
1935 
1936  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1937  self.renameHLTprocessInSequence(sequence)
1938 
1939  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1940  # not for fastsim
1941  if 'HLT' in self.stepMap.keys() and not self._options.fast:
1942  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1943  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1944  else:
1945  # schedule DQM as a standard Path
1946  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1947  self.schedule.append(getattr(self.process,pathName))
1948 
1949  pathName='dqmofflineOnPAT_step'
1950  for (i,sequence) in enumerate(postSequenceList):
1951  if (i!=0):
1952  pathName='dqmofflineOnPAT_%d_step'%(i)
1953 
1954  # if both MINIAOD and DQM are run in the same process, schedule DQM in an EndPath
1955  if 'PAT' in self.stepMap.keys():
1956  # need to put DQM in an EndPath, to access the miniAOD filter results
1957  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1958  else:
1959  # schedule DQM as a standard Path
1960  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1961  self.schedule.append(getattr(self.process,pathName))
1962 
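# The two lists built above come from different columns of the autoDQM mapping:
# index 0 fills sequenceList (scheduled as dqmoffline*_step, EndPaths when HLT runs in
# the same job), index 1 fills postSequenceList (scheduled as dqmofflineOnPAT*_step,
# EndPaths when PAT runs in the same job). For illustration, two expanded entries give
# dqmoffline_step, dqmoffline_1_step, dqmofflineOnPAT_step and dqmofflineOnPAT_1_step.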
1963  def prepare_HARVESTING(self, sequence = None):
1964  """ Enrich the process with harvesting step """
1965  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1966  self.loadAndRemember(self.DQMSaverCFF)
1967 
1968  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1969  sequence = sequence.split('.')[-1]
1970 
1971  # decide which HARVESTING paths to use
1972  harvestingList = sequence.split("+")
1973  from DQMOffline.Configuration.autoDQM import autoDQM
1974  from Validation.Configuration.autoValidation import autoValidation
1975  import copy
1976  combined_mapping = copy.deepcopy( autoDQM )
1977  combined_mapping.update( autoValidation )
1978  self.expandMapping(harvestingList,combined_mapping,index=-1)
1979 
1980  if len(set(harvestingList))!=len(harvestingList):
1981  harvestingList=list(set(harvestingList))
1982  print "Duplicate entries for HARVESTING, using",harvestingList
1983 
1984  for name in harvestingList:
1985  if not name in harvestingConfig.__dict__:
1986  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1987  continue
1988  harvestingstream = getattr(harvestingConfig,name)
1989  if isinstance(harvestingstream,cms.Path):
1990  self.schedule.append(harvestingstream)
1991  self.blacklist_paths.append(harvestingstream)
1992  if isinstance(harvestingstream,cms.Sequence):
1993  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1994  self.schedule.append(getattr(self.process,name+"_step"))
1995 
1996  self.scheduleSequence('DQMSaver','dqmsave_step')
1997  return
1998 
1999  def prepare_ALCAHARVEST(self, sequence = None):
2000  """ Enrich the process with AlCaHarvesting step """
2001  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2002  sequence=sequence.split(".")[-1]
2003 
2004  # decide which AlcaHARVESTING paths to use
2005  harvestingList = sequence.split("+")
2006 
2007 
2008 
2009  from Configuration.AlCa.autoPCL import autoPCL
2010  self.expandMapping(harvestingList,autoPCL)
2011 
2012  for name in harvestingConfig.__dict__:
2013  harvestingstream = getattr(harvestingConfig,name)
2014  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2015  self.schedule.append(harvestingstream)
2016  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2017  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2018  harvestingList.remove(name)
2019  # append the common part at the end of the sequence
2020  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2021  self.schedule.append(lastStep)
2022 
2023  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2024  print "The following harvesting could not be found : ", harvestingList
2025  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2026 
2027 
2028 
2029  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2030  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2031  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2032  return
2033 
2034  def finalizeFastSimHLT(self):
2035  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2036  self.schedule.append(self.process.reconstruction)
2037 
2038 
2039  def build_production_info(self, evt_type, evtnumber):
2040  """ Add useful info for the production. """
2041  self.process.configurationMetadata=cms.untracked.PSet\
2042  (version=cms.untracked.string("$Revision: 1.19 $"),
2043  name=cms.untracked.string("Applications"),
2044  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2045  )
2046 
2047  self.addedObjects.append(("Production Info","configurationMetadata"))
2048 
2049 
2050  def prepare(self, doChecking = False):
2051  """ Prepare the configuration string and add missing pieces."""
2052 
2053  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2054  self.addMaxEvents()
2055  if self.with_input:
2056  self.addSource()
2057  self.addStandardSequences()
2058  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2059  self.completeInputCommand()
2060  self.addConditions()
2061 
2062 
2063  outputModuleCfgCode=""
2064  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2065  outputModuleCfgCode=self.addOutput()
2066 
2067  self.addCommon()
2068 
2069  self.pythonCfgCode = "# Auto generated configuration file\n"
2070  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2071  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2072  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2073  if hasattr(self._options,"era") and self._options.era :
2074  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2075  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2076  # Multiple eras can be specified in a comma separated list
2077  for requestedEra in self._options.era.split(",") :
2078  self.pythonCfgCode += ",eras."+requestedEra
2079  self.pythonCfgCode += ")\n\n" # end of the line
2080  else :
2081  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2082 
2083  self.pythonCfgCode += "# import of standard configurations\n"
2084  for module in self.imports:
2085  self.pythonCfgCode += ("process.load('"+module+"')\n")
2086 
2087  # production info
2088  if not hasattr(self.process,"configurationMetadata"):
2089  self.build_production_info(self._options.evt_type, self._options.number)
2090  else:
2091  #the PSet was added via a load
2092  self.addedObjects.append(("Production Info","configurationMetadata"))
2093 
2094  self.pythonCfgCode +="\n"
2095  for comment,object in self.addedObjects:
2096  if comment!="":
2097  self.pythonCfgCode += "\n# "+comment+"\n"
2098  self.pythonCfgCode += dumpPython(self.process,object)
2099 
2100  # dump the output definition
2101  self.pythonCfgCode += "\n# Output definition\n"
2102  self.pythonCfgCode += outputModuleCfgCode
2103 
2104  # dump all additional outputs (e.g. alca or skim streams)
2105  self.pythonCfgCode += "\n# Additional output definition\n"
2106  #I do not understand why the keys are not normally ordered.
2107  nl=self.additionalOutputs.keys()
2108  nl.sort()
2109  for name in nl:
2110  output = self.additionalOutputs[name]
2111  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2112  tmpOut = cms.EndPath(output)
2113  setattr(self.process,name+'OutPath',tmpOut)
2114  self.schedule.append(tmpOut)
2115 
2116  # dump all additional commands
2117  self.pythonCfgCode += "\n# Other statements\n"
2118  for command in self.additionalCommands:
2119  self.pythonCfgCode += command + "\n"
2120 
2121  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2122  for object in self._options.inlineObjets.split(','):
2123  if not object:
2124  continue
2125  if not hasattr(self.process,object):
2126  print 'cannot inline -'+object+'- : not known'
2127  else:
2128  self.pythonCfgCode +='\n'
2129  self.pythonCfgCode +=dumpPython(self.process,object)
2130 
2131  # dump all paths
2132  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2133  for path in self.process.paths:
2134  if getattr(self.process,path) not in self.blacklist_paths:
2135  self.pythonCfgCode += dumpPython(self.process,path)
2136 
2137  for endpath in self.process.endpaths:
2138  if getattr(self.process,endpath) not in self.blacklist_paths:
2139  self.pythonCfgCode += dumpPython(self.process,endpath)
2140 
2141  # dump the schedule
2142  self.pythonCfgCode += "\n# Schedule definition\n"
2143  result = "process.schedule = cms.Schedule("
2144 
2145  # handling of the schedule
2146  self.process.schedule = cms.Schedule()
2147  for item in self.schedule:
2148  if not isinstance(item, cms.Schedule):
2149  self.process.schedule.append(item)
2150  else:
2151  self.process.schedule.extend(item)
2152 
2153  if hasattr(self.process,"HLTSchedule"):
2154  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2155  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2156  pathNames = ['process.'+p.label_() for p in beforeHLT]
2157  result += ','.join(pathNames)+')\n'
2158  result += 'process.schedule.extend(process.HLTSchedule)\n'
2159  pathNames = ['process.'+p.label_() for p in afterHLT]
2160  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2161  else:
2162  pathNames = ['process.'+p.label_() for p in self.schedule]
2163  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2164 
2165  self.pythonCfgCode += result
2166 
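# The generated cfg therefore ends up with either a single
#   process.schedule = cms.Schedule(process.step_a, ..., process.endjob_step)
# statement or, when an HLT menu is embedded in the process, a schedule split around it:
#   process.schedule = cms.Schedule(<paths before HLT>)
#   process.schedule.extend(process.HLTSchedule)
#   process.schedule.extend([<paths after HLT>])
# (the path names shown here are placeholders).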
2167  if self._options.nThreads != "1":
2168  self.pythonCfgCode +="\n"
2169  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2170  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2171  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2172  #repacked version
2173  if self._options.isRepacked:
2174  self.pythonCfgCode +="\n"
2175  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2176  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2177  MassReplaceInputTag(self.process)
2178 
2179  # special treatment in case of production filter sequence 2/2
2180  if self.productionFilterSequence:
2181  self.pythonCfgCode +='# filter all paths with the production filter sequence\n'
2182  self.pythonCfgCode +='for path in process.paths:\n'
2183  if len(self.conditionalPaths):
2184  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2185  if len(self.excludedPaths):
2186  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2187  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2188  pfs = getattr(self.process,self.productionFilterSequence)
2189  for path in self.process.paths:
2190  if not path in self.conditionalPaths: continue
2191  if path in self.excludedPaths: continue
2192  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2193 
2194 
2195  # dump customise fragment
2196  self.pythonCfgCode += self.addCustomise()
2197 
2198  if self._options.runUnscheduled:
2199  # prune and delete paths
2200  #this is not supporting the blacklist at this point since I do not understand it
2201  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2202  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2203  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2204 
2205  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2206  self.process=convertToUnscheduled(self.process)
2207 
2208  #now add the unscheduled stuff
2209  for module in self.importsUnsch:
2210  self.process.load(module)
2211  self.pythonCfgCode += ("process.load('"+module+"')\n")
2212 
2213  #and clean the unscheduled stuff
2214  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2215  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2216 
2217  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2218  self.process=cleanUnscheduled(self.process)
2219 
2220 
2221  self.pythonCfgCode += self.addCustomise(1)
2222 
2223 
2224  # make the .io file
2225 
2226  if self._options.io:
2227  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2228  if not self._options.io.endswith('.io'): self._options.io+='.io'
2229  io=open(self._options.io,'w')
2230  ioJson={}
2231  if hasattr(self.process.source,"fileNames"):
2232  if len(self.process.source.fileNames.value()):
2233  ioJson['primary']=self.process.source.fileNames.value()
2234  if hasattr(self.process.source,"secondaryFileNames"):
2235  if len(self.process.source.secondaryFileNames.value()):
2236  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2237  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2238  ioJson['pileup']=self._options.pileup_input[4:]
2239  for (o,om) in self.process.outputModules_().items():
2240  ioJson[o]=om.fileName.value()
2241  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2242  if self.productionFilterSequence:
2243  ioJson['filter']=self.productionFilterSequence
2244  import json
2245  io.write(json.dumps(ioJson))
2246  return
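# Shape of the .io summary written above, with illustrative values (output module and
# dataset names are placeholders):
#   {"primary": ["/store/relval/.../step2.root"],
#    "pileup": "/RelValMinBias/.../GEN-SIM",
#    "RECOSIMoutput": "step3.root",
#    "GT": "<resolved global tag>",
#    "filter": "ProductionFilterSequence"}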
2247 