ConfigBuilder.py

1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.useCondDBv1 = False
30 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
31 defaultOptions.harvesting= 'AtRunEnd'
32 defaultOptions.gflash = False
33 defaultOptions.number = -1
34 defaultOptions.number_out = None
35 defaultOptions.arguments = ""
36 defaultOptions.name = "NO NAME GIVEN"
37 defaultOptions.evt_type = ""
38 defaultOptions.filein = ""
39 defaultOptions.dasquery=""
40 defaultOptions.dasoption=""
41 defaultOptions.secondfilein = ""
42 defaultOptions.customisation_file = []
43 defaultOptions.customisation_file_unsch = []
44 defaultOptions.customise_commands = ""
45 defaultOptions.inline_custom=False
46 defaultOptions.particleTable = 'pythiapdt'
47 defaultOptions.particleTableList = ['pythiapdt','pdt']
48 defaultOptions.dirin = ''
49 defaultOptions.dirout = ''
50 defaultOptions.filetype = 'EDM'
51 defaultOptions.fileout = 'output.root'
52 defaultOptions.filtername = ''
53 defaultOptions.lazy_download = False
54 defaultOptions.custom_conditions = ''
55 defaultOptions.hltProcess = ''
56 defaultOptions.eventcontent = None
57 defaultOptions.datatier = None
58 defaultOptions.inlineEventContent = True
59 defaultOptions.inlineObjets =''
60 defaultOptions.hideGen=False
61 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
62 defaultOptions.beamspot=None
63 defaultOptions.outputDefinition =''
64 defaultOptions.inputCommands = None
65 defaultOptions.outputCommands = None
66 defaultOptions.inputEventContent = ''
67 defaultOptions.dropDescendant = False
68 defaultOptions.relval = None
69 defaultOptions.slhc = None
70 defaultOptions.profile = None
71 defaultOptions.isRepacked = False
72 defaultOptions.restoreRNDSeeds = False
73 defaultOptions.donotDropOnInput = ''
74 defaultOptions.python_filename =''
75 defaultOptions.io=None
76 defaultOptions.lumiToProcess=None
77 defaultOptions.fast=False
78 defaultOptions.runsAndWeightsForMC = None
79 defaultOptions.runsScenarioForMC = None
80 defaultOptions.runUnscheduled = False
81 defaultOptions.timeoutOutput = False
82 defaultOptions.nThreads = '1'
83 
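For orientation, a hedged sketch of how these defaults are typically consumed: a driver script builds an Options-like object from them, overrides whatever the user asked for, and hands it to the ConfigBuilder class below. The fragment name and step list here are made-up values.

    myOptions = Options()
    myOptions.__dict__.update(defaultOptions.__dict__)   # start from the canonical defaults
    myOptions.evt_type = 'SingleMuPt10_cfi'              # hypothetical generator fragment
    myOptions.step = 'GEN,SIM'
    myOptions.isMC = True
    myOptions.isData = False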
84 # some helper routines
85 def dumpPython(process,name):
86  theObject = getattr(process,name)
87  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
88  return "process."+name+" = " + theObject.dumpPython("process")
89  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
90  return "process."+name+" = " + theObject.dumpPython()+"\n"
91  else:
92  return "process."+name+" = " + theObject.dumpPython()+"\n"
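A usage sketch for the dumpPython helper above, assuming 'process' is a cms.Process that already owns a path called raw2digi_step (both names are illustrative):

    print(dumpPython(process, 'raw2digi_step'))   # e.g. "process.raw2digi_step = cms.Path(process.RawToDigi)"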
93 def filesFromList(fileName,s=None):
94  import os
95  import FWCore.ParameterSet.Config as cms
96  prim=[]
97  sec=[]
98  for line in open(fileName,'r'):
99  if line.count(".root")>=2:
100  #two files solution...
101  entries=line.replace("\n","").split()
102  if not entries[0] in prim:
103  prim.append(entries[0])
104  if not entries[1] in sec:
105  sec.append(entries[1])
106  elif (line.find(".root")!=-1):
107  entry=line.replace("\n","")
108  if not entry in prim:
109  prim.append(entry)
110  if s:
111  if not hasattr(s,"fileNames"):
112  s.fileNames=cms.untracked.vstring(prim)
113  else:
114  s.fileNames.extend(prim)
115  if len(sec)!=0:
116  if not hasattr(s,"secondaryFileNames"):
117  s.secondaryFileNames=cms.untracked.vstring(sec)
118  else:
119  s.secondaryFileNames.extend(sec)
120  print "found files: ",prim
121  if len(prim)==0:
122  raise Exception("There are no files in input from the file list")
123  if len(sec)!=0:
124  print "found parent files:",sec
125  return (prim,sec)
126 
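A hedged usage sketch of filesFromList; the list file name and its contents are hypothetical. Each line holds a primary file, optionally followed by its parent file:

    # files.txt (made up) could contain lines such as:
    #   /store/data/run1/primary_1.root /store/data/run1/parent_1.root
    #   /store/data/run1/primary_2.root
    prim, sec = filesFromList('files.txt')          # returns ([primaries], [parents])
    source = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
    filesFromList('files.txt', source)              # also fills source.fileNames / secondaryFileNames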
127 def filesFromDASQuery(query,option="",s=None):
128  import os,time
129  import FWCore.ParameterSet.Config as cms
130  prim=[]
131  sec=[]
132  print "the query is",query
133  eC=5
134  count=0
135  while eC!=0 and count<3:
136  if count!=0:
137  print 'Sleeping, then retrying DAS'
138  time.sleep(100)
139  p = Popen('das_client.py %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
140  pipe=p.stdout.read()
141  tupleP = os.waitpid(p.pid, 0)
142  eC=tupleP[1]
143  count=count+1
144  if eC==0:
145  print "DAS succeeded after",count,"attempts",eC
146  else:
147  print "DAS failed 3 times - I give up"
148  for line in pipe.split('\n'):
149  if line.count(".root")>=2:
150  #two files solution...
151  entries=line.replace("\n","").split()
152  if not entries[0] in prim:
153  prim.append(entries[0])
154  if not entries[1] in sec:
155  sec.append(entries[1])
156  elif (line.find(".root")!=-1):
157  entry=line.replace("\n","")
158  if not entry in prim:
159  prim.append(entry)
160  if s:
161  if not hasattr(s,"fileNames"):
162  s.fileNames=cms.untracked.vstring(prim)
163  else:
164  s.fileNames.extend(prim)
165  if len(sec)!=0:
166  if not hasattr(s,"secondaryFileNames"):
167  s.secondaryFileNames=cms.untracked.vstring(sec)
168  else:
169  s.secondaryFileNames.extend(sec)
170  print "found files: ",prim
171  if len(sec)!=0:
172  print "found parent files:",sec
173  return (prim,sec)
174 
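For illustration, a direct call to filesFromDASQuery; the dataset name is a placeholder and das_client.py must be available in the environment:

    prim, sec = filesFromDASQuery('file dataset = /SomePrimary/SomeEra-SomeConditions/GEN-SIM-RECO')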
175 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
176  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
177  for s in aProcess.paths_().keys():
178  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
179 
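A short usage sketch, assuming 'process' is an already-configured cms.Process:

    MassReplaceInputTag(process)                                    # default: rawDataCollector -> rawDataRepacker
    MassReplaceInputTag(process, oldT="oldLabel", newT="newLabel")  # hypothetical labels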
180 def anyOf(listOfKeys,dict,opt=None):
181  for k in listOfKeys:
182  if k in dict:
183  toReturn=dict[k]
184  dict.pop(k)
185  return toReturn
186  if opt!=None:
187  return opt
188  else:
189  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output options")
190 
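anyOf pops the first matching key (so the caller can later detect unused keys) and falls back to opt when none is present; a small self-contained illustration:

    d = {'t': 'GEN-SIM', 'e': 'RAWSIM'}
    anyOf(['t', 'tier', 'dataTier'], d)         # -> 'GEN-SIM', and 't' is removed from d
    anyOf(['f', 'ftN', 'filterName'], d, '')    # -> '' (the fallback), d is unchanged
    # anyOf(['x'], d)                           # would raise: the key is mandatory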
192  """The main building routines """
193 
194  def __init__(self, options, process = None, with_output = False, with_input = False ):
195  """options taken from old cmsDriver and optparse """
196 
197  options.outfile_name = options.dirout+options.fileout
198 
199  self._options = options
200 
201  if self._options.isData and options.isMC:
202  raise Exception("ERROR: You may specify only --data or --mc, not both")
203  #if not self._options.conditions:
204  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
205 
206  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
207  if 'ENDJOB' in self._options.step:
208  if (hasattr(self._options,"outputDefinition") and \
209  self._options.outputDefinition != '' and \
210  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
211  (hasattr(self._options,"datatier") and \
212  self._options.datatier and \
213  'DQMIO' in self._options.datatier):
214  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
215  self._options.step=self._options.step.replace(',ENDJOB','')
216 
217 
218 
219  # what steps are provided by this class?
220  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
221  self.stepMap={}
222  self.stepKeys=[]
223  for step in self._options.step.split(","):
224  if step=='': continue
225  stepParts = step.split(":")
226  stepName = stepParts[0]
227  if stepName not in stepList and not stepName.startswith('re'):
228  raise ValueError("Step "+stepName+" unknown")
229  if len(stepParts)==1:
230  self.stepMap[stepName]=""
231  elif len(stepParts)==2:
232  self.stepMap[stepName]=stepParts[1].split('+')
233  elif len(stepParts)==3:
234  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
235  else:
236  raise ValueError("Step definition "+step+" invalid")
237  self.stepKeys.append(stepName)
238 
239  #print "map of steps is:",self.stepMap
240 
241  self.with_output = with_output
242  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
243  self.with_output = False
244  self.with_input = with_input
245  if process == None:
246  self.process = cms.Process(self._options.name)
247  else:
248  self.process = process
249  self.imports = []
250  self.importsUnsch = []
251  self.define_Configs()
252  self.schedule = list()
253 
254  # we are doing two things here:
255  # creating a process to catch errors
256  # building the code to re-create the process
257 
258  self.additionalCommands = []
259  # TODO: maybe a list of to be dumped objects would help as well
260  self.blacklist_paths = []
261  self.addedObjects = []
262  self.additionalOutputs = {}
263 
264  self.productionFilterSequence = None
265  self.nextScheduleIsConditional=False
266  self.conditionalPaths=[]
267  self.excludedPaths=[]
268 
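The --step string parsed above maps each comma-separated entry onto a stepMap value; a standalone sketch of the same logic, with placeholder sequence names:

    stepMap = {}
    for step in 'RAW2DIGI,RECO:localreco+globalreco'.split(','):
        parts = step.split(':')
        stepMap[parts[0]] = parts[1].split('+') if len(parts) == 2 else ''
    # stepMap -> {'RAW2DIGI': '', 'RECO': ['localreco', 'globalreco']}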
269  def profileOptions(self):
270  """
271  addIgProfService
272  Function to add the igprof profile service so that you can dump in the middle
273  of the run.
274  """
275  profileOpts = self._options.profile.split(':')
276  profilerStart = 1
277  profilerInterval = 100
278  profilerFormat = None
279  profilerJobFormat = None
280 
281  if len(profileOpts):
282  #type, given as first argument is unused here
283  profileOpts.pop(0)
284  if len(profileOpts):
285  startEvent = profileOpts.pop(0)
286  if not startEvent.isdigit():
287  raise Exception("%s is not a number" % startEvent)
288  profilerStart = int(startEvent)
289  if len(profileOpts):
290  eventInterval = profileOpts.pop(0)
291  if not eventInterval.isdigit():
292  raise Exception("%s is not a number" % eventInterval)
293  profilerInterval = int(eventInterval)
294  if len(profileOpts):
295  profilerFormat = profileOpts.pop(0)
296 
297 
298  if not profilerFormat:
299  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
300  self._options.step,
301  self._options.pileup,
302  self._options.conditions,
303  self._options.datatier,
304  self._options.profileTypeLabel)
305  if not profilerJobFormat and profilerFormat.endswith(".gz"):
306  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
307  elif not profilerJobFormat:
308  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
309 
310  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
311 
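The --profile value is a colon-separated spec, <type>:<firstEvent>:<interval>[:<format>]; a standalone mimic of the parsing, with a made-up spec:

    spec = 'pp:101:500'.split(':')
    spec.pop(0)                                    # profiler type, unused here
    start = int(spec.pop(0)) if spec else 1        # -> 101
    interval = int(spec.pop(0)) if spec else 100   # -> 500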
312  def load(self,includeFile):
313  includeFile = includeFile.replace('/','.')
314  self.process.load(includeFile)
315  return sys.modules[includeFile]
316 
317  def loadAndRemember(self, includeFile,unsch=0):
318  """helper routine to load am memorize imports"""
319  # we could make the imports a on-the-fly data method of the process instance itself
320  # not sure if the latter is a good idea
321  includeFile = includeFile.replace('/','.')
322  if unsch==0:
323  self.imports.append(includeFile)
324  self.process.load(includeFile)
325  return sys.modules[includeFile]
326  else:
327  self.importsUnsch.append(includeFile)
328  return 0#sys.modules[includeFile]
329 
330  def executeAndRemember(self, command):
331  """helper routine to remember replace statements"""
332  self.additionalCommands.append(command)
333  if not command.strip().startswith("#"):
334  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
335  import re
336  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
337  #exec(command.replace("process.","self.process."))
338 
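executeAndRemember runs the command on self.process by prefixing bare 'process' references with 'self.'; the substitution can be checked on its own (the parameter path is just an example):

    import re
    cmd = 'process.mix.input.nbPileupEvents.averageNumber = cms.double(20.0)'
    print(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])", r"\1self.process\3", cmd))
    # -> self.process.mix.input.nbPileupEvents.averageNumber = cms.double(20.0)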
339  def addCommon(self):
340  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
341  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
342  else:
343  self.process.options = cms.untracked.PSet( )
344 
345  if self._options.runUnscheduled:
346  self.process.options.allowUnscheduled=cms.untracked.bool(True)
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many evts will be processed"""
373  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
374  if self._options.number_out:
375  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
376  self.addedObjects.append(("","maxEvents"))
377 
378  def addSource(self):
379  """Here the source is built. Priority: file, generator"""
380  self.addedObjects.append(("Input source","source"))
381 
382  def filesFromOption(self):
383  for entry in self._options.filein.split(','):
384  print "entry",entry
385  if entry.startswith("filelist:"):
386  filesFromList(entry[9:],self.process.source)
387  elif entry.startswith("dbs:") or entry.startswith("das:"):
388  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
389  else:
390  self.process.source.fileNames.append(self._options.dirin+entry)
391  if self._options.secondfilein:
392  if not hasattr(self.process.source,"secondaryFileNames"):
393  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
394  for entry in self._options.secondfilein.split(','):
395  print "entry",entry
396  if entry.startswith("filelist:"):
397  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
398  elif entry.startswith("dbs:") or entry.startswith("das:"):
399  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
400  else:
401  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
402 
403  if self._options.filein or self._options.dasquery:
404  if self._options.filetype == "EDM":
405  self.process.source=cms.Source("PoolSource",
406  fileNames = cms.untracked.vstring(),
407  secondaryFileNames= cms.untracked.vstring())
408  filesFromOption(self)
409  elif self._options.filetype == "DAT":
410  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411  filesFromOption(self)
412  elif self._options.filetype == "LHE":
413  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
414  if self._options.filein.startswith("lhe:"):
415  #list the article directory automatically
416  args=self._options.filein.split(':')
417  article=args[1]
418  print 'LHE input from article ',article
419  location='/store/lhe/'
420  import os
421  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
422  for line in textOfFiles:
423  for fileName in [x for x in line.split() if '.lhe' in x]:
424  self.process.source.fileNames.append(location+article+'/'+fileName)
425  if len(args)>2:
426  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
427  else:
428  filesFromOption(self)
429 
430 
431  elif self._options.filetype == "DQM":
432  self.process.source=cms.Source("DQMRootSource",
433  fileNames = cms.untracked.vstring())
434  filesFromOption(self)
435 
436  elif self._options.filetype == "DQMDAQ":
437  # FIXME: how to configure it if there are no input files specified?
438  self.process.source=cms.Source("DQMStreamerReader")
439 
440 
441  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
442  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
443 
444  if self._options.dasquery!='':
445  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
446  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
447 
448  ##drop LHEXMLStringProduct on input to save memory if appropriate
449  if 'GEN' in self.stepMap.keys():
450  if self._options.inputCommands:
451  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
452  else:
453  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
454 
455  if self.process.source and self._options.inputCommands:
456  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
457  for command in self._options.inputCommands.split(','):
458  # remove whitespace around the keep/drop statements
459  command = command.strip()
460  if command=='': continue
461  self.process.source.inputCommands.append(command)
462  if not self._options.dropDescendant:
463  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
464 
465  if self._options.lumiToProcess:
466  import FWCore.PythonUtilities.LumiList as LumiList
467  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
468 
469  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
470  if self.process.source is None:
471  self.process.source=cms.Source("EmptySource")
472 
473  # modify source in case of run-dependent MC
474  self.runsAndWeights=None
475  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
476  if not self._options.isMC :
477  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
478  if self._options.runsAndWeightsForMC:
479  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
480  else:
481  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
482  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
483  __import__(RunsAndWeights[self._options.runsScenarioForMC])
484  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
485  else:
486  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
487 
488  if self.runsAndWeights:
489  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
490  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
491  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
492  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
493 
494  return
495 
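For illustration, roughly what this method builds for a DAS-driven EDM input; the dataset name is a placeholder and 'process' stands for self.process:

    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring(),
                                secondaryFileNames=cms.untracked.vstring())
    filesFromDASQuery('file dataset = /SomePrimary/SomeEra/GEN-SIM', '', process.source)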
496  def addOutput(self):
497  """ Add output module to the process """
498  result=""
499  if self._options.outputDefinition:
500  if self._options.datatier:
501  print "--datatier & --eventcontent options ignored"
502 
503  #new output convention with a list of dict
504  outList = eval(self._options.outputDefinition)
505  for (id,outDefDict) in enumerate(outList):
506  outDefDictStr=outDefDict.__str__()
507  if not isinstance(outDefDict,dict):
508  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
509  #requires option: tier
510  theTier=anyOf(['t','tier','dataTier'],outDefDict)
511  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
512  ## event content
513  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
514  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
515  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
516  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
517  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
518  # module label has a particular role
519  if not theModuleLabel:
520  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
521  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
522  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
523  ]
524  for name in tryNames:
525  if not hasattr(self.process,name):
526  theModuleLabel=name
527  break
528  if not theModuleLabel:
529  raise Exception("cannot find a module label for specification: "+outDefDictStr)
530  if id==0:
531  defaultFileName=self._options.outfile_name
532  else:
533  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
534 
535  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
536  if not theFileName.endswith('.root'):
537  theFileName+='.root'
538 
539  if len(outDefDict.keys()):
540  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
541  if theStreamType=='DQMIO': theStreamType='DQM'
542  if theStreamType=='ALL':
543  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
544  else:
545  theEventContent = getattr(self.process, theStreamType+"EventContent")
546 
547 
548  addAlCaSelects=False
549  if theStreamType=='ALCARECO' and not theFilterName:
550  theFilterName='StreamALCACombined'
551  addAlCaSelects=True
552 
553  CppType='PoolOutputModule'
554  if self._options.timeoutOutput:
555  CppType='TimeoutPoolOutputModule'
556  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
557  output = cms.OutputModule(CppType,
558  theEventContent.clone(),
559  fileName = cms.untracked.string(theFileName),
560  dataset = cms.untracked.PSet(
561  dataTier = cms.untracked.string(theTier),
562  filterName = cms.untracked.string(theFilterName))
563  )
564  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
565  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
566  if not theSelectEvent and hasattr(self.process,'filtering_step'):
567  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
568  if theSelectEvent:
569  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
570 
571  if addAlCaSelects:
572  if not hasattr(output,'SelectEvents'):
573  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
574  for alca in self.AlCaPaths:
575  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
576 
577 
578  if hasattr(self.process,theModuleLabel):
579  raise Exception("the current process already has a module "+theModuleLabel+" defined")
580  #print "creating output module ",theModuleLabel
581  setattr(self.process,theModuleLabel,output)
582  outputModule=getattr(self.process,theModuleLabel)
583  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
584  path=getattr(self.process,theModuleLabel+'_step')
585  self.schedule.append(path)
586 
587  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
588  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
589  return label
590  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
591  if theExtraOutputCommands:
592  if not isinstance(theExtraOutputCommands,list):
593  raise Exception("extra output command in --output must be a list of strings")
594  if hasattr(self.process,theStreamType+"EventContent"):
595  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
596  else:
597  outputModule.outputCommands.extend(theExtraOutputCommands)
598 
599  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
600 
601  ##ends the --output options model
602  return result
603 
604  streamTypes=self._options.eventcontent.split(',')
605  tiers=self._options.datatier.split(',')
606  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
607  raise Exception("number of event content arguments does not match number of datatier arguments")
608 
609  # if the only step is alca we don't need to put in an output
610  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
611  return "\n"
612 
613  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
614  if streamType=='': continue
615  if streamType=='DQMIO': streamType='DQM'
616  theEventContent = getattr(self.process, streamType+"EventContent")
617  if i==0:
618  theFileName=self._options.outfile_name
619  theFilterName=self._options.filtername
620  else:
621  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
622  theFilterName=self._options.filtername
623  CppType='PoolOutputModule'
624  if self._options.timeoutOutput:
625  CppType='TimeoutPoolOutputModule'
626  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
627  output = cms.OutputModule(CppType,
628  theEventContent,
629  fileName = cms.untracked.string(theFileName),
630  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
631  filterName = cms.untracked.string(theFilterName)
632  )
633  )
634  if hasattr(self.process,"generation_step") and streamType!='LHE':
635  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
636  if hasattr(self.process,"filtering_step"):
637  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
638 
639  if streamType=='ALCARECO':
640  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
641 
642  if "MINIAOD" in streamType:
643  output.dropMetaData = cms.untracked.string('ALL')
644  output.fastCloning= cms.untracked.bool(False)
645  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
646 
647  outputModuleName=streamType+'output'
648  setattr(self.process,outputModuleName,output)
649  outputModule=getattr(self.process,outputModuleName)
650  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
651  path=getattr(self.process,outputModuleName+'_step')
652  self.schedule.append(path)
653 
654  if self._options.outputCommands and streamType!='DQM':
655  for evct in self._options.outputCommands.split(','):
656  if not evct: continue
657  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
658 
659  if not self._options.inlineEventContent:
660  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
661  return label
662  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
663 
664  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
665 
666  return result
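The --output option is an eval'd Python list of dicts whose keys are the aliases accepted by anyOf; a hypothetical value and the tier/content it resolves to:

    outputDefinition = "[{'e':'AODSIM','t':'AODSIM'},{'e':'DQM','t':'DQMIO','fn':'dqm.root'}]"
    for outDefDict in eval(outputDefinition):
        tier = anyOf(['t','tier','dataTier'], outDefDict)
        content = anyOf(['e','ec','eventContent','streamType'], outDefDict, tier)
        print(tier + ' <- ' + content)
    # -> "AODSIM <- AODSIM" then "DQMIO <- DQM"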
667 
669  """
670  Add selected standard sequences to the process
671  """
672  # load the pile up file
673  if self._options.pileup:
674  pileupSpec=self._options.pileup.split(',')[0]
675 
676  # FastSim: GEN-mixing or DIGI-RECO mixing?
677  GEN_mixing = False
678  if self._options.fast and pileupSpec.find("GEN_") == 0:
679  GEN_mixing = True
680  pileupSpec = pileupSpec[4:]
681 
682  # Does the requested pile-up scenario exist?
683  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
684  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
685  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
686  if self._options.fast:
687  message += "\n-"*20+"\n additional options for FastSim (gen-mixing):\n" + "-"*20 + "\n" + '\n'.join(["GEN_" + x for x in Mixing.keys()]) + "\n"
688  raise Exception(message)
689 
690  # Put mixing parameters in a dictionary
691  if '.' in pileupSpec:
692  mixingDict={'file':pileupSpec}
693  elif pileupSpec.startswith('file:'):
694  mixingDict={'file':pileupSpec[5:]}
695  else:
696  import copy
697  mixingDict=copy.copy(Mixing[pileupSpec])
698  if len(self._options.pileup.split(','))>1:
699  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
700 
701  # Load the pu cfg file corresponding to the requested pu scenario
702  if 'file:' in pileupSpec:
703  #the file is local
704  self.process.load(mixingDict['file'])
705  print "inlining mixing module configuration"
706  self._options.inlineObjets+=',mix'
707  else:
708  self.loadAndRemember(mixingDict['file'])
709 
710  # FastSim: transform cfg of MixingModule from FullSim to FastSim
711  if self._options.fast:
712  if GEN_mixing:
713  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareGenMixing")
714  else:
715  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareDigiRecoMixing")
716 
717  mixingDict.pop('file')
718  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
719  if self._options.pileup_input:
720  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
721  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
722  else:
723  mixingDict['F']=self._options.pileup_input.split(',')
724  specialization=defineMixing(mixingDict)
725  for command in specialization:
726  self.executeAndRemember(command)
727  if len(mixingDict)!=0:
728  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
729 
730 
731  # load the geometry file
732  try:
733  if len(self.stepMap):
734  self.loadAndRemember(self.GeometryCFF)
735  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
736  self.loadAndRemember(self.SimGeometryCFF)
737  if self.geometryDBLabel:
738  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
739  except ImportError:
740  print "Geometry option",self._options.geometry,"unknown."
741  raise
742 
743  if len(self.stepMap):
744  self.loadAndRemember(self.magFieldCFF)
745 
746  for stepName in self.stepKeys:
747  stepSpec = self.stepMap[stepName]
748  print "Step:", stepName,"Spec:",stepSpec
749  if stepName.startswith('re'):
750  ##add the corresponding input content
751  if stepName[2:] not in self._options.donotDropOnInput:
752  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
753  stepName=stepName[2:]
754  if stepSpec=="":
755  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
756  elif type(stepSpec)==list:
757  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
758  elif type(stepSpec)==tuple:
759  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
760  else:
761  raise ValueError("Invalid step definition")
762 
763  if self._options.restoreRNDSeeds!=False:
764  #it is either True, or a process name
765  if self._options.restoreRNDSeeds==True:
766  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
767  else:
768  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
769  if self._options.inputEventContent or self._options.inputCommands:
770  if self._options.inputCommands:
771  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
772  else:
773  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
774 
775 
777  if self._options.inputEventContent:
778  import copy
779  def dropSecondDropStar(iec):
780  #drop all but the first occurrence of 'drop *' in the list
781  count=0
782  for item in iec:
783  if item=='drop *':
784  if count!=0:
785  iec.remove(item)
786  count+=1
787 
788 
789  ## allow comma separated input eventcontent
790  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
791  for evct in self._options.inputEventContent.split(','):
792  if evct=='': continue
793  theEventContent = getattr(self.process, evct+"EventContent")
794  if hasattr(theEventContent,'outputCommands'):
795  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
796  if hasattr(theEventContent,'inputCommands'):
797  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
798 
799  dropSecondDropStar(self.process.source.inputCommands)
800 
801  if not self._options.dropDescendant:
802  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
803 
804 
805  return
806 
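The --pileup value is a scenario key, optionally followed by a Python dict of overrides that is eval'd and merged into the mixing dictionary; a sketch with made-up values:

    pileup = 'SomeScenario,{"N": 20}'                 # hypothetical scenario and override
    pileupSpec = pileup.split(',')[0]                 # 'SomeScenario'
    overrides = eval(pileup[pileup.find(',')+1:])     # {'N': 20}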
807  def addConditions(self):
808  """Add conditions to the process"""
809  if not self._options.conditions: return
810 
811  if 'FrontierConditions_GlobalTag' in self._options.conditions:
812  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
813  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
814 
815  self.loadAndRemember(self.ConditionsDefaultCFF)
816 
817  if self._options.useCondDBv1:
818  from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag
819  else:
820  from Configuration.AlCa.GlobalTag import GlobalTag
821 
822  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
823 
824  if self._options.useCondDBv1:
825  self.additionalCommands.append('from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag')
826  else:
827  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
828 
829  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
830 
831  if self._options.slhc:
832  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
833 
834 
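The remembered commands reproduce this block in the dumped configuration; schematically, assuming a cms.Process named process and a placeholder conditions key, they amount to:

    from Configuration.AlCa.GlobalTag import GlobalTag
    process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:some_conditions_key', '')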
835  def addCustomise(self,unsch=0):
836  """Include the customise code """
837 
838  custOpt=[]
839  if unsch==0:
840  for c in self._options.customisation_file:
841  custOpt.extend(c.split(","))
842  else:
843  for c in self._options.customisation_file_unsch:
844  custOpt.extend(c.split(","))
845 
846  custMap=DictTypes.SortedKeysDict()
847  for opt in custOpt:
848  if opt=='': continue
849  if opt.count('.')>1:
850  raise Exception("more than one '.' in the specification: "+opt)
851  fileName=opt.split('.')[0]
852  if opt.count('.')==0: rest='customise'
853  else:
854  rest=opt.split('.')[1]
855  if rest=='py': rest='customise' #catch the case of --customise file.py
856 
857  if fileName in custMap:
858  custMap[fileName].extend(rest.split('+'))
859  else:
860  custMap[fileName]=rest.split('+')
861 
862  if len(custMap)==0:
863  final_snippet='\n'
864  else:
865  final_snippet='\n# customisation of the process.\n'
866 
867  allFcn=[]
868  for opt in custMap:
869  allFcn.extend(custMap[opt])
870  for fcn in allFcn:
871  if allFcn.count(fcn)!=1:
872  raise Exception("cannot specify twice "+fcn+" as a customisation method")
873 
874  for f in custMap:
875  # let python search for that package and do syntax checking at the same time
876  packageName = f.replace(".py","").replace("/",".")
877  __import__(packageName)
878  package = sys.modules[packageName]
879 
880  # now ask the package for its definition and pick .py instead of .pyc
881  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
882 
883  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
884  if self._options.inline_custom:
885  for line in file(customiseFile,'r'):
886  if "import FWCore.ParameterSet.Config" in line:
887  continue
888  final_snippet += line
889  else:
890  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
891  for fcn in custMap[f]:
892  print "customising the process with",fcn,"from",f
893  if not hasattr(package,fcn):
894  #bound to fail at run time
895  raise Exception("config "+f+" has no function "+fcn)
896  #execute the command
897  self.process=getattr(package,fcn)(self.process)
898  #and print it in the configuration
899  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
900  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
901 
902  if len(custMap)!=0:
903  final_snippet += '\n# End of customisation functions\n'
904 
905  ### now for a useful command
906  if unsch==1 or not self._options.runUnscheduled:
907  if self._options.customise_commands:
908  import string
909  final_snippet +='\n# Customisation from command line'
910  for com in self._options.customise_commands.split('\\n'):
911  com=string.lstrip(com)
912  self.executeAndRemember(com)
913  final_snippet +='\n'+com
914 
915  return final_snippet
916 
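Each --customise entry has the form <package or file>.<func1>+<func2> (no dot, or a bare .py suffix, defaults to a function named customise); a standalone sketch of the splitting done above, with hypothetical names:

    opt = 'MyPackage/MyTools/customs.tweakA+tweakB'
    fileName = opt.split('.')[0]                 # 'MyPackage/MyTools/customs'
    functions = opt.split('.')[1].split('+')     # ['tweakA', 'tweakB']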
917  #----------------------------------------------------------------------------
918  # here the methods to define the python includes for each step or
919  # conditions
920  #----------------------------------------------------------------------------
921  def define_Configs(self):
922  if len(self.stepMap):
923  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
924  if self._options.particleTable not in defaultOptions.particleTableList:
925  print 'Invalid particle table provided. Options are:'
926  print defaultOptions.particleTableList
927  sys.exit(-1)
928  else:
929  if len(self.stepMap):
930  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
931 
932  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
933 
934  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
935  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
936  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
937  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
938  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
939  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
940  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
941  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
942  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
943  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
944  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
945  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
946  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
947  self.EIDefaultCFF=None
948  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
949  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
950  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
951  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
952  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
953  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
954  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
955  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
956  if self._options.useCondDBv1:
957  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_condDBv1_cff"
958  else:
959  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
960  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
961  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
962 
963  if "DATAMIX" in self.stepMap.keys():
964  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
965  if self._options.datamix == 'PreMix':
966  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
967  else:
968  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
969  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
970  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
971 
972  if "DIGIPREMIX" in self.stepMap.keys():
973  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
974 
975  self.ALCADefaultSeq=None
976  self.LHEDefaultSeq='externalLHEProducer'
977  self.GENDefaultSeq='pgen'
978  self.SIMDefaultSeq='psim'
979  self.DIGIDefaultSeq='pdigi'
980  self.DIGIPREMIXDefaultSeq='pdigi'
981  self.DIGIPREMIX_S2DefaultSeq='pdigi'
982  self.DATAMIXDefaultSeq=None
983  self.DIGI2RAWDefaultSeq='DigiToRaw'
984  self.HLTDefaultSeq='GRun'
985  self.L1DefaultSeq=None
986  self.L1REPACKDefaultSeq='GT'
987  self.HARVESTINGDefaultSeq=None
988  self.ALCAHARVESTDefaultSeq=None
989  self.CFWRITERDefaultSeq=None
990  self.RAW2DIGIDefaultSeq='RawToDigi'
991  self.L1RecoDefaultSeq='L1Reco'
992  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
993  if 'RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap:
994  self.RECODefaultSeq='reconstruction'
995  else:
996  self.RECODefaultSeq='reconstruction_fromRECO'
997 
998  self.EIDefaultSeq='top'
999  self.POSTRECODefaultSeq=None
1000  self.L1HwValDefaultSeq='L1HwVal'
1001  self.DQMDefaultSeq='DQMOffline'
1002  self.VALIDATIONDefaultSeq=''
1003  self.ENDJOBDefaultSeq='endOfProcess'
1004  self.REPACKDefaultSeq='DigiToRawRepack'
1005  self.PATDefaultSeq='miniAOD'
1006 
1007  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1008 
1009  if not self._options.beamspot:
1010  self._options.beamspot=VtxSmearedDefaultKey
1011 
1012  # if it's MC then change the raw2digi
1013  if self._options.isMC==True:
1014  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1015  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1016  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1017  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1018  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1019  else:
1020  self._options.beamspot = None
1021 
1022  #patch for gen, due to backward incompatibility
1023  if 'reGEN' in self.stepMap:
1024  self.GENDefaultSeq='fixGenInfo'
1025 
1026  if self._options.scenario=='cosmics':
1027  self._options.pileup='Cosmics'
1028  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1029  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1030  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1031  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1032  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1033  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1034  if self._options.isMC==True:
1035  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1036  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1037  self.RECODefaultSeq='reconstructionCosmics'
1038  self.DQMDefaultSeq='DQMOfflineCosmics'
1039 
1040  if self._options.scenario=='HeavyIons':
1041  if not self._options.beamspot:
1042  self._options.beamspot=VtxSmearedHIDefaultKey
1043  self.HLTDefaultSeq = 'HIon'
1044  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1045  self.VALIDATIONDefaultSeq=''
1046  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1047  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1048  self.RECODefaultSeq='reconstructionHeavyIons'
1049  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1050  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1051  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1052  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1053  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1054  if self._options.isMC==True:
1055  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1056 
1057 
1058  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1059 
1060  self.USERDefaultSeq='user'
1061  self.USERDefaultCFF=None
1062 
1063  # the magnetic field
1064  if self._options.isData:
1065  if self._options.magField==defaultOptions.magField:
1066  print "magnetic field option forced to: AutoFromDBCurrent"
1067  self._options.magField='AutoFromDBCurrent'
1068  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1069  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1070 
1071  # the geometry
1072  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1073  self.geometryDBLabel=None
1074  simGeometry=''
1075  if self._options.fast:
1076  if 'start' in self._options.conditions.lower():
1077  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1078  else:
1079  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1080  else:
1081  def inGeometryKeys(opt):
1082  from Configuration.StandardSequences.GeometryConf import GeometryConf
1083  if opt in GeometryConf:
1084  return GeometryConf[opt]
1085  else:
1086  return opt
1087 
1088  geoms=self._options.geometry.split(',')
1089  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1090  if len(geoms)==2:
1091  #may specify the reco geometry
1092  if '/' in geoms[1] or '_cff' in geoms[1]:
1093  self.GeometryCFF=geoms[1]
1094  else:
1095  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1096 
1097  if (geoms[0].startswith('DB:')):
1098  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1099  self.geometryDBLabel=geoms[0][3:]
1100  print "with DB:"
1101  else:
1102  if '/' in geoms[0] or '_cff' in geoms[0]:
1103  self.SimGeometryCFF=geoms[0]
1104  else:
1105  simGeometry=geoms[0]
1106  if self._options.gflash==True:
1107  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1108  else:
1109  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1110 
1111  # synchronize the geometry configuration and the FullSimulation sequence to be used
1112  if simGeometry not in defaultOptions.geometryExtendedOptions:
1113  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1114 
1115  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1116  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1117  self._options.beamspot='NoSmear'
1118 
1119  # if fastsim switch event content
1120  if self._options.fast:
1121  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1122  self.SIMDefaultSeq = 'psim'
1123  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1124  self.RECODefaultSeq= 'reconstruction'
1125  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1126  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1127  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1128  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1129  self.DIGIDefaultCFF = 'FastSimulation.Configuration.Digi_cff'
1130  if self._options.datamix == 'PreMix':
1131  self.DIGIDefaultCFF="FastSimulation.Configuration.DigiDMPreMix_cff"
1132  if "DIGIPREMIX" in self.stepMap.keys():
1133  self.DIGIDefaultCFF="FastSimulation.Configuration.Digi_PreMix_cff"
1134  if "DATAMIX" in self.stepMap.keys():
1135  self.DATAMIXDefaultCFF="FastSimulation.Configuration.DataMixer"+self._options.datamix+"_cff"
1136 
1137  self.DIGIDefaultSeq = 'pdigi'
1138  self.L1EMDefaultCFF='FastSimulation.Configuration.SimL1Emulator_cff'
1139  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1140  self.DIGI2RAWDefaultCFF = 'FastSimulation.Configuration.DigiToRaw_cff'
1141  self.DIGI2RAWDefaultSeq = 'DigiToRaw'
1142  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1143  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1144 
1145 
1146 
1147  # Mixing
1148  if self._options.pileup=='default':
1149  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1150  self._options.pileup=MixingDefaultKey
1151  # temporary, until digi-reco mixing becomes standard in RelVals
1152  if self._options.fast:
1153  self._options.pileup="GEN_" + MixingDefaultKey
1154 
1155 
1156  #not driven by a default cff anymore
1157  if self._options.isData:
1158  self._options.pileup=None
1159 
1160  if self._options.slhc:
1161  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1162  if 'stdgeom' not in self._options.slhc:
1163  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1164  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1165  if self._options.pileup!=defaultOptions.pileup:
1166  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1167 
1168  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1169 
1170  # for alca, skims, etc
1171  def addExtraStream(self,name,stream,workflow='full'):
1172  # define output module and go from there
1173  output = cms.OutputModule("PoolOutputModule")
1174  if stream.selectEvents.parameters_().__len__()!=0:
1175  output.SelectEvents = stream.selectEvents
1176  else:
1177  output.SelectEvents = cms.untracked.PSet()
1178  output.SelectEvents.SelectEvents=cms.vstring()
1179  if isinstance(stream.paths,tuple):
1180  for path in stream.paths:
1181  output.SelectEvents.SelectEvents.append(path.label())
1182  else:
1183  output.SelectEvents.SelectEvents.append(stream.paths.label())
1184 
1185 
1186 
1187  if isinstance(stream.content,str):
1188  evtPset=getattr(self.process,stream.content)
1189  for p in evtPset.parameters_():
1190  setattr(output,p,getattr(evtPset,p))
1191  if not self._options.inlineEventContent:
1192  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1193  return label
1194  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1195  else:
1196  output.outputCommands = stream.content
1197 
1198 
1199  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1200 
1201  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1202  filterName = cms.untracked.string(stream.name))
1203 
1204  if self._options.filtername:
1205  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1206 
1207  #add an automatic flushing to limit memory consumption
1208  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1209 
1210  if workflow in ("producers","full"):
1211  if isinstance(stream.paths,tuple):
1212  for path in stream.paths:
1213  self.schedule.append(path)
1214  else:
1215  self.schedule.append(stream.paths)
1216 
1217 
1218  # in case of relvals we don't want to have additional outputs
1219  if (not self._options.relval) and workflow in ("full","output"):
1220  self.additionalOutputs[name] = output
1221  setattr(self.process,name,output)
1222 
1223  if workflow == 'output':
1224  # adjust the select events to the proper trigger results from previous process
1225  filterList = output.SelectEvents.SelectEvents
1226  for i, filter in enumerate(filterList):
1227  filterList[i] = filter+":"+self._options.triggerResultsProcess
1228 
1229  return output
1230 
1231  #----------------------------------------------------------------------------
1232  # here the methods to create the steps. Of course we are doing magic here ;)
1233  # prepare_STEPNAME modifies self.process and what else's needed.
1234  #----------------------------------------------------------------------------
1235 
1236  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1237  if ( len(sequence.split('.'))==1 ):
1238  l=self.loadAndRemember(defaultCFF,unsch)
1239  elif ( len(sequence.split('.'))==2 ):
1240  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1241  sequence=sequence.split('.')[1]
1242  else:
1243  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1244  print sequence,"not recognized"
1245  raise Exception("invalid sub-sequence specification: "+sequence)
1246  return l
1247 
1248  def scheduleSequence(self,seq,prefix,what='Path'):
1249  if '*' in seq:
1250  #create only one path with all sequences in it
1251  for i,s in enumerate(seq.split('*')):
1252  if i==0:
1253  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1254  else:
1255  p=getattr(self.process,prefix)
1256  p+=getattr(self.process, s)
1257  self.schedule.append(getattr(self.process,prefix))
1258  return
1259  else:
1260  #create as many path as many sequences
1261  if not '+' in seq:
1262  if self.nextScheduleIsConditional:
1263  self.conditionalPaths.append(prefix)
1264  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1265  self.schedule.append(getattr(self.process,prefix))
1266  else:
1267  for i,s in enumerate(seq.split('+')):
1268  sn=prefix+'%d'%(i)
1269  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1270  self.schedule.append(getattr(self.process,sn))
1271  return
1272 
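The separator decides the path layout: '*' keeps all sequences in a single Path named prefix, while '+' creates one Path per sequence (prefix0, prefix1, ...); a hedged sketch assuming seqA and seqB already exist on the process:

    self.scheduleSequence('seqA*seqB', 'combined_step')   # one Path 'combined_step' running seqA then seqB
    self.scheduleSequence('seqA+seqB', 'split_step')      # two Paths: 'split_step0' and 'split_step1'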
1273  def scheduleSequenceAtEnd(self,seq,prefix):
1274  self.scheduleSequence(seq,prefix,what='EndPath')
1275  return
1276 
1277  def prepare_ALCAPRODUCER(self, sequence = None):
1278  self.prepare_ALCA(sequence, workflow = "producers")
1279 
1280  def prepare_ALCAOUTPUT(self, sequence = None):
1281  self.prepare_ALCA(sequence, workflow = "output")
1282 
1283  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1284  """ Enrich the process with alca streams """
1285  print 'DL enriching',workflow,sequence
1286  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1287  sequence = sequence.split('.')[-1]
1288 
1289  # decide which ALCA paths to use
1290  alcaList = sequence.split("+")
1291  maxLevel=0
1292  from Configuration.AlCa.autoAlca import autoAlca
1293  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1294  self.expandMapping(alcaList,autoAlca)
1295  self.AlCaPaths=[]
1296  for name in alcaConfig.__dict__:
1297  alcastream = getattr(alcaConfig,name)
1298  shortName = name.replace('ALCARECOStream','')
1299  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1300  output = self.addExtraStream(name,alcastream, workflow = workflow)
1301  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1302  self.AlCaPaths.append(shortName)
1303  if 'DQM' in alcaList:
1304  if not self._options.inlineEventContent and hasattr(self.process,name):
1305  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1306  else:
1307  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1308 
1309  #rename the HLT process name in the alca modules
1310  if self._options.hltProcess or 'HLT' in self.stepMap:
1311  if isinstance(alcastream.paths,tuple):
1312  for path in alcastream.paths:
1313  self.renameHLTprocessInSequence(path.label())
1314  else:
1315  self.renameHLTprocessInSequence(alcastream.paths.label())
1316 
1317  for i in range(alcaList.count(shortName)):
1318  alcaList.remove(shortName)
1319 
1320  # DQM needs a special handling
1321  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1322  path = getattr(alcaConfig,name)
1323  self.schedule.append(path)
1324  alcaList.remove('DQM')
1325 
1326  if isinstance(alcastream,cms.Path):
1327  #blacklist the alca paths so that they do not appear in the cfg
1328  self.blacklist_paths.append(alcastream)
1329 
1330 
1331  if len(alcaList) != 0:
1332  available=[]
1333  for name in alcaConfig.__dict__:
1334  alcastream = getattr(alcaConfig,name)
1335  if isinstance(alcastream,cms.FilteredStream):
1336  available.append(name.replace('ALCARECOStream',''))
1337  print "The following alcas could not be found "+str(alcaList)
1338  print "available ",available
1339  #print "verify your configuration, ignoring for now"
1340  raise Exception("The following alcas could not be found "+str(alcaList))
1341 
1342  def prepare_LHE(self, sequence = None):
1343  #load the fragment
1344  ##make it loadable
1345  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1346  print "Loading lhe fragment from",loadFragment
1347  __import__(loadFragment)
1348  self.process.load(loadFragment)
1349  ##inline the modules
1350  self._options.inlineObjets+=','+sequence
1351 
1352  getattr(self.process,sequence).nEvents = int(self._options.number)
1353 
1354  #schedule it
1355  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1356  self.excludedPaths.append("lhe_step")
1357  self.schedule.append( self.process.lhe_step )
1358 
1359  def prepare_GEN(self, sequence = None):
1360  """ load the fragment of generator configuration """
1361  loadFailure=False
1362  #remove trailing .py
1363  #support old style .cfi by changing something.cfi into something_cfi
1364  #remove python/ from the name
1365  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1366  #standard location of fragments
1367  if not '/' in loadFragment:
1368  loadFragment='Configuration.Generator.'+loadFragment
1369  else:
1370  loadFragment=loadFragment.replace('/','.')
1371  try:
1372  print "Loading generator fragment from",loadFragment
1373  __import__(loadFragment)
1374  except:
1375  loadFailure=True
1376  #if self.process.source and self.process.source.type_()=='EmptySource':
1377  if not (self._options.filein or self._options.dasquery):
1378  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1379 
1380  if not loadFailure:
1381  generatorModule=sys.modules[loadFragment]
1382  genModules=generatorModule.__dict__
1383  #remove lhe producer module since this should have been
1384  #imported instead in the LHE step
1385  if self.LHEDefaultSeq in genModules:
1386  del genModules[self.LHEDefaultSeq]
1387 
1388  if self._options.hideGen:
1389  self.loadAndRemember(loadFragment)
1390  else:
1391  self.process.load(loadFragment)
1392  # expose the objects from that fragment to the configuration
1393  import FWCore.ParameterSet.Modules as cmstypes
1394  for name in genModules:
1395  theObject = getattr(generatorModule,name)
1396  if isinstance(theObject, cmstypes._Module):
1397  self._options.inlineObjets=name+','+self._options.inlineObjets
1398  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1399  self._options.inlineObjets+=','+name
1400 
1401  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1402  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1403  self.productionFilterSequence = 'ProductionFilterSequence'
1404  elif 'generator' in genModules:
1405  self.productionFilterSequence = 'generator'
1406 
1407  """ Enrich the schedule with the rest of the generation step """
1408  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1409  genSeqName=sequence.split('.')[-1]
1410 
1411  if True:
1412  try:
1413  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1414  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1415  self.loadAndRemember(cffToBeLoaded)
1416  except ImportError:
1417  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1418 
1419  if self._options.scenario == 'HeavyIons':
1420  if self._options.pileup=='HiMixGEN':
1421  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1422  else:
1423  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1424 
1425  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1426  self.schedule.append(self.process.generation_step)
1427 
1428  #register to the genstepfilter the name of the path (static right now, but might evolve)
1429  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1430 
1431  if 'reGEN' in self.stepMap:
1432  #stop here
1433  return
1434 
1435  """ Enrich the schedule with the summary of the filter step """
1436  #the gen filter in the endpath
1437  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1438  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1439  return
1440 
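For reference, the chain of replace() calls above turns the --evt_type argument into an importable module path; a fragment name without a '/' is assumed to live under Configuration/Generator. A small standalone sketch with hypothetical fragment names:

def fragmentToModule(evt_type):
    # mirrors the name mangling done in prepare_GEN
    name = evt_type.replace('.py', '').replace('.', '_').replace('python/', '')
    if '/' not in name:
        return 'Configuration.Generator.' + name
    return name.replace('/', '.')

print fragmentToModule('SingleMuPt10_cfi')                            # Configuration.Generator.SingleMuPt10_cfi
print fragmentToModule('MyAnalysis/MyFragments/python/MyGen_cfi.py')  # MyAnalysis.MyFragments.MyGen_cfi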
1441  def prepare_SIM(self, sequence = None):
1442  """ Enrich the schedule with the simulation step"""
1443  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1444  if not self._options.fast:
1445  if self._options.gflash==True:
1446  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1447 
1448  if self._options.magField=='0T':
1449  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1450  else:
1451  if self._options.magField=='0T':
1452  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1453 
1454  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1455  return
1456 
1457  def prepare_DIGI(self, sequence = None):
1458  """ Enrich the schedule with the digitisation step"""
1459  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1460 
1461  if self._options.gflash==True:
1462  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1463 
1464  if sequence == 'pdigi_valid' or sequence == 'pdigi_valid_nogen' :
1465  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1466 
1467  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1468  if self._options.inputEventContent=='':
1469  self._options.inputEventContent='REGEN'
1470  else:
1471  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1472 
1473 
1474  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1475  return
1476 
1477  def prepare_DIGIPREMIX(self, sequence = None):
1478  """ Enrich the schedule with the digitisation step"""
1479  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1480 
1481  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1482 
1483  if sequence == 'pdigi_valid':
1484  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1485  else:
1486  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1487 
1488  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1489  return
1490 
1491  def prepare_DIGIPREMIX_S2(self, sequence = None):
1492  """ Enrich the schedule with the digitisation step"""
1493  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1494 
1495  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1496 
1497 
1498  if sequence == 'pdigi_valid':
1499  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1500  else:
1501  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1502 
1503  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1504  return
1505 
1506  def prepare_CFWRITER(self, sequence = None):
1507  """ Enrich the schedule with the crossing frame writer step"""
1508  self.loadAndRemember(self.CFWRITERDefaultCFF)
1509  self.scheduleSequence('pcfw','cfwriter_step')
1510  return
1511 
1512  def prepare_DATAMIX(self, sequence = None):
1513  """ Enrich the schedule with the digitisation step"""
1514  self.loadAndRemember(self.DATAMIXDefaultCFF)
1515  self.scheduleSequence('pdatamix','datamixing_step')
1516 
1517  if self._options.pileup_input:
1518  theFiles=''
1519  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1520  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1521  elif self._options.pileup_input.startswith("filelist:"):
1522  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1523  else:
1524  theFiles=self._options.pileup_input.split(',')
1525  #print theFiles
1526  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1527 
1528  return
1529 
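The --pileup_input specification accepts three formats, distinguished only by their prefix. A self-contained sketch of that dispatch (the actual file resolution is done by the filesFromDASQuery/filesFromList helpers used above; names in this example are made up):

def pileupInputKind(spec):
    if spec.startswith('dbs:') or spec.startswith('das:'):
        return 'das', spec[4:]            # dataset name, resolved via a DAS query
    elif spec.startswith('filelist:'):
        return 'filelist', spec[9:]       # path to a text file listing input files
    else:
        return 'files', spec.split(',')   # explicit comma-separated list of files

print pileupInputKind('das:/MinBias/Fake-v1/GEN-SIM')
print pileupInputKind('filelist:pu_files.txt')
print pileupInputKind('file:a.root,file:b.root')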
1530  def prepare_DIGI2RAW(self, sequence = None):
1531  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1532  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1533  if "DIGIPREMIX" in self.stepMap.keys():
1534  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1535  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1536 
1537  return
1538 
1539  def prepare_REPACK(self, sequence = None):
1540  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1541  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1542  return
1543 
1544  def prepare_L1(self, sequence = None):
1545  """ Enrich the schedule with the L1 simulation step"""
1546  assert(sequence == None)
1547  self.loadAndRemember(self.L1EMDefaultCFF)
1548  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1549  return
1550 
1551  def prepare_L1REPACK(self, sequence = None):
1552  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1553  supported = ['GT','GT1','GT2','GCTGT']
1554  if sequence in supported:
1555  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1556  if self._options.scenario == 'HeavyIons':
1557  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1558  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1559  else:
1560  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1561  raise Exception('unsupported feature')
1562 
1563 
1564  def prepare_HLT(self, sequence = None):
1565  """ Enrich the schedule with the HLT simulation step"""
1566  if not sequence:
1567  print "no specification of the hlt menu has been given, should never happen"
1568  raise Exception('no HLT sequence provided')
1569 
1570  if '@' in sequence:
1571  # case where HLT:@something was provided
1572  from Configuration.HLT.autoHLT import autoHLT
1573  key = sequence[1:]
1574  if key in autoHLT:
1575  sequence = autoHLT[key]
1576  else:
1577  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1578 
1579  if ',' in sequence:
1580  #case where HLT:something:something was provided
1581  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1582  optionsForHLT = {}
1583  if self._options.scenario == 'HeavyIons':
1584  optionsForHLT['type'] = 'HIon'
1585  else:
1586  optionsForHLT['type'] = 'GRun'
1587  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1588  if sequence == 'run,fromSource':
1589  if hasattr(self.process.source,'firstRun'):
1590  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1591  elif hasattr(self.process.source,'setRunNumber'):
1592  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1593  else:
1594  raise Exception('Cannot replace menu to load %s'%(sequence))
1595  else:
1596  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1597  else:
1598  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1599 
1600  if self._options.isMC:
1601  if self._options.fast:
1602  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1603  else:
1604  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1605 
1606  if self._options.name != 'HLT':
1607  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1608  self.additionalCommands.append('process = ProcessName(process)')
1609  self.additionalCommands.append('')
1610  from HLTrigger.Configuration.CustomConfigs import ProcessName
1611  self.process = ProcessName(self.process)
1612 
1613  self.schedule.append(self.process.HLTSchedule)
1614  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1615 
1616  #this is a fake, to be removed with fastsim migration and HLT menu dump
1617  if self._options.fast:
1618  if not hasattr(self.process,'HLTEndSequence'):
1619  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1620 
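The keyword arguments handed to process.loadHltConfiguration are rendered into a plain string before being written to the configuration; a minimal reproduction of that formatting step:

optionsForHLT = {'type': 'GRun'}
optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
print optionsForHLTConfig    # type='GRun'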
1621 
1622  def prepare_RAW2RECO(self, sequence = None):
1623  if ',' in sequence:
1624  seqReco=sequence.split(',')[1]
1625  seqDigi=sequence.split(',')[0]
1626  else:
1627  raise Exception("RAW2RECO requires a RAW2DIGI and a RECO specification separated by a comma, '%s' is insufficient"%sequence)
1628 
1629  self.prepare_RAW2DIGI(seqDigi)
1630  self.prepare_RECO(seqReco)
1631  return
1632 
1633  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1634  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1635  self.scheduleSequence(sequence,'raw2digi_step')
1636  # if self._options.isRepacked:
1637  #self.renameInputTagsInSequence(sequence)
1638  return
1639 
1640  def prepare_PATFILTER(self, sequence=None):
1641  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1642  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1643  for filt in allMetFilterPaths:
1644  self.schedule.append(getattr(self.process,'Flag_'+filt))
1645 
1646  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1647  ''' Enrich the schedule with L1 HW validation '''
1648  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1649  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1650  print '\n\n\n DEPRECATED this has no action \n\n\n'
1651  return
1652 
1653  def prepare_L1Reco(self, sequence = "L1Reco"):
1654  ''' Enrich the schedule with L1 reconstruction '''
1655  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1656  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1657  return
1658 
1659  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1660  ''' Enrich the schedule with the L1 track trigger '''
1661  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1662  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1663  return
1664 
1665  def prepare_FILTER(self, sequence = None):
1666  ''' Enrich the schedule with a user defined filter sequence '''
1667  ## load the relevant part
1668  filterConfig=self.load(sequence.split('.')[0])
1669  filterSeq=sequence.split('.')[-1]
1670  ## print it in the configuration
1671  class PrintAllModules(object):
1672  def __init__(self):
1673  self.inliner=''
1674  pass
1675  def enter(self,visitee):
1676  try:
1677  label=visitee.label()
1678  ##needs to be in reverse order
1679  self.inliner=label+','+self.inliner
1680  except:
1681  pass
1682  def leave(self,v): pass
1683 
1684  expander=PrintAllModules()
1685  getattr(self.process,filterSeq).visit( expander )
1686  self._options.inlineObjets+=','+expander.inliner
1687  self._options.inlineObjets+=','+filterSeq
1688 
1689  ## put the filtering path in the schedule
1690  self.scheduleSequence(filterSeq,'filtering_step')
1691  self.nextScheduleIsConditional=True
1692  ## put it before all the other paths
1693  self.productionFilterSequence = filterSeq
1694 
1695  return
1696 
1697  def prepare_RECO(self, sequence = "reconstruction"):
1698  ''' Enrich the schedule with reconstruction '''
1699  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1700  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1701  return
1702 
1703  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1704  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1705  if not self._options.fast:
1706  print "ERROR: this step is only implemented for FastSim"
1707  sys.exit()
1708  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1709  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1710  return
1711 
1712  def prepare_PAT(self, sequence = "miniAOD"):
1713  ''' Enrich the schedule with PAT '''
1714  self.prepare_PATFILTER(self)
1715  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1716  if not self._options.runUnscheduled:
1717  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1718  if self._options.isData:
1719  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1720  else:
1721  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1722  if self._options.fast:
1723  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1724  return
1725 
1726  def prepare_EI(self, sequence = None):
1727  ''' Enrich the schedule with event interpretation '''
1728  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1729  if sequence in EventInterpretation:
1730  self.EIDefaultCFF = EventInterpretation[sequence]
1731  sequence = 'EIsequence'
1732  else:
1733  raise Exception('Cannot set %s event interpretation'%( sequence) )
1734  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1735  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1736  return
1737 
1738  def prepare_SKIM(self, sequence = "all"):
1739  ''' Enrich the schedule with skimming fragments'''
1740  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1741  sequence = sequence.split('.')[-1]
1742 
1743  skimlist=sequence.split('+')
1744  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1745  from Configuration.Skimming.autoSkim import autoSkim
1746  self.expandMapping(skimlist,autoSkim)
1747 
1748  #print "dictionary for skims:",skimConfig.__dict__
1749  for skim in skimConfig.__dict__:
1750  skimstream = getattr(skimConfig,skim)
1751  if isinstance(skimstream,cms.Path):
1752  #blacklist the skim path so that it does not appear in the cfg
1753  self.blacklist_paths.append(skimstream)
1754  if (not isinstance(skimstream,cms.FilteredStream)):
1755  continue
1756  shortname = skim.replace('SKIMStream','')
1757  if (sequence=="all"):
1758  self.addExtraStream(skim,skimstream)
1759  elif (shortname in skimlist):
1760  self.addExtraStream(skim,skimstream)
1761  #add a DQM eventcontent for this guy
1762  if self._options.datatier=='DQM':
1763  self.process.load(self.EVTCONTDefaultCFF)
1764  skimstreamDQM = cms.FilteredStream(
1765  responsible = skimstream.responsible,
1766  name = skimstream.name+'DQM',
1767  paths = skimstream.paths,
1768  selectEvents = skimstream.selectEvents,
1769  content = self._options.datatier+'EventContent',
1770  dataTier = cms.untracked.string(self._options.datatier)
1771  )
1772  self.addExtraStream(skim+'DQM',skimstreamDQM)
1773  for i in range(skimlist.count(shortname)):
1774  skimlist.remove(shortname)
1775 
1776 
1777 
1778  if (len(skimlist)!=0 and sequence!="all"):
1779  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1780  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1781 
1782  def prepare_USER(self, sequence = None):
1783  ''' Enrich the schedule with a user defined sequence '''
1784  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1785  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1786  return
1787 
1788  def prepare_POSTRECO(self, sequence = None):
1789  """ Enrich the schedule with the postreco step """
1790  self.loadAndRemember(self.POSTRECODefaultCFF)
1791  self.scheduleSequence('postreco_generator','postreco_step')
1792  return
1793 
1794 
1795  def prepare_VALIDATION(self, sequence = 'validation'):
1796  print sequence,"in preparing validation"
1797  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1798  from Validation.Configuration.autoValidation import autoValidation
1799  #in case VALIDATION:something:somethingelse -> something,somethingelse
1800  sequence=sequence.split('.')[-1]
1801  if sequence.find(',')!=-1:
1802  prevalSeqName=sequence.split(',')[0].split('+')
1803  valSeqName=sequence.split(',')[1].split('+')
1804  self.expandMapping(prevalSeqName,autoValidation,index=0)
1805  self.expandMapping(valSeqName,autoValidation,index=1)
1806  else:
1807  if '@' in sequence:
1808  prevalSeqName=sequence.split('+')
1809  valSeqName=sequence.split('+')
1810  self.expandMapping(prevalSeqName,autoValidation,index=0)
1811  self.expandMapping(valSeqName,autoValidation,index=1)
1812  else:
1813  postfix=''
1814  if sequence:
1815  postfix='_'+sequence
1816  prevalSeqName=['prevalidation'+postfix]
1817  valSeqName=['validation'+postfix]
1818  if not hasattr(self.process,valSeqName[0]):
1819  prevalSeqName=['']
1820  valSeqName=[sequence]
1821 
1822  def NFI(index):
1823  ##name from index, required to keep backward compatibility
1824  if index==0:
1825  return ''
1826  else:
1827  return '%s'%index
1828 
1829  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1830  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1831  self._options.restoreRNDSeeds=True
1832 
1833  #rename the HLT process in validation steps
1834  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1835  for s in valSeqName+prevalSeqName:
1836  if s:
1837  self.renameHLTprocessInSequence(s)
1838  for (i,s) in enumerate(prevalSeqName):
1839  if s:
1840  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1841  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1842 
1843  for (i,s) in enumerate(valSeqName):
1844  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1845  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1846 
1847  if not 'DIGI' in self.stepMap and not self._options.fast:
1848  self.executeAndRemember("process.mix.playback = True")
1849  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1850  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1851  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1852 
1853  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1854  #will get in the schedule, smoothly
1855  for (i,s) in enumerate(valSeqName):
1856  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1857 
1858  return
1859 
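The NFI helper defined inside prepare_VALIDATION simply suppresses the index for the first (pre)validation path, so that a single sequence keeps the historical name validation_step. A tiny sketch with hypothetical sequence names:

def NFI(index):
    # name-from-index: empty suffix for the first path, the index otherwise
    return '' if index == 0 else '%s' % index

for i, s in enumerate(['validation', 'validationMiniAOD']):    # hypothetical sequence names
    print 'validation_step%s' % NFI(i)                         # validation_step, validation_step1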
1860 
1861  class MassSearchReplaceProcessNameVisitor(object):
1862  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1863  It will climb down within PSets, VPSets and VInputTags to find its target"""
1864  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1865  self._paramReplace = paramReplace
1866  self._paramSearch = paramSearch
1867  self._verbose = verbose
1868  self._whitelist = whitelist
1869 
1870  def doIt(self,pset,base):
1871  if isinstance(pset, cms._Parameterizable):
1872  for name in pset.parameters_().keys():
1873  # skip whitelisted parameters
1874  if name in self._whitelist:
1875  continue
1876  # if I use pset.parameters_().items() I get copies of the parameter values
1877  # so I can't modify the nested pset
1878  value = getattr(pset,name)
1879  type = value.pythonTypeName()
1880  if type in ('cms.PSet', 'cms.untracked.PSet'):
1881  self.doIt(value,base+"."+name)
1882  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1883  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1884  elif type in ('cms.string', 'cms.untracked.string'):
1885  if value.value() == self._paramSearch:
1886  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1887  setattr(pset, name,self._paramReplace)
1888  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1889  for (i,n) in enumerate(value):
1890  if not isinstance(n, cms.InputTag):
1891  n=cms.InputTag(n)
1892  if n.processName == self._paramSearch:
1893  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1894  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1895  setattr(n,"processName",self._paramReplace)
1896  value[i]=n
1897  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1898  for (i,n) in enumerate(value):
1899  if n==self._paramSearch:
1900  getattr(pset,name)[i]=self._paramReplace
1901  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1902  if value.processName == self._paramSearch:
1903  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1904  setattr(getattr(pset, name),"processName",self._paramReplace)
1905 
1906  def enter(self,visitee):
1907  label = ''
1908  try:
1909  label = visitee.label()
1910  except AttributeError:
1911  label = '<Module not in a Process>'
1912  except:
1913  label = 'other exception'
1914  self.doIt(visitee, label)
1915 
1916  def leave(self,visitee):
1917  pass
1918 
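A toy usage of such a process-name-replacing visitor, assuming a CMSSW Python environment; the analyzer type and labels below are invented, and the visitor is reached through the ConfigBuilder class exactly as in the commands appended further down:

import FWCore.ParameterSet.Config as cms
from Configuration.Applications.ConfigBuilder import ConfigBuilder

process = cms.Process("TEST")
process.ana = cms.EDAnalyzer("DummyAnalyzer",
    trig = cms.InputTag("TriggerResults", "", "HLT"))
process.s = cms.Sequence(process.ana)
# rewrite every occurrence of the process name "HLT" into "reHLT"
process.s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT", whitelist=("subSystemFolder",)))
print process.ana.trig.processName    # expected: reHLT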
1919  #visit a sequence to replace all input tags
1920  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1921  print "Replacing all InputTag %s => %s"%(oldT,newT)
1922  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1923  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1924  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1925  if not loadMe in self.additionalCommands:
1926  self.additionalCommands.append(loadMe)
1927  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1928 
1929  #change the process name used to address HLT results in any sequence
1930  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1931  if self._options.hltProcess:
1932  proc=self._options.hltProcess
1933  else:
1934  proc=self.process.name_()
1935  if proc==HLTprocess: return
1936  # look up all modules in the given sequence
1937  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1938  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1939  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1940  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1941  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1942 
1943 
1944  def expandMapping(self,seqList,mapping,index=None):
1945  maxLevel=20
1946  level=0
1947  while '@' in repr(seqList) and level<maxLevel:
1948  level+=1
1949  for specifiedCommand in seqList:
1950  if specifiedCommand.startswith('@'):
1951  location=specifiedCommand[1:]
1952  if not location in mapping:
1953  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1954  mappedTo=mapping[location]
1955  if index!=None:
1956  mappedTo=mappedTo[index]
1957  seqList.remove(specifiedCommand)
1958  seqList.extend(mappedTo.split('+'))
1959  break
1960  if level==maxLevel:
1961  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1962 
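To make the '@' expansion concrete, here is a self-contained run of the same loop on a made-up mapping (the real keys live in autoAlca, autoSkim, autoDQM, etc.):

mapping = {'Mu': 'TkAlMuonIsolated+TkAlZMuMu', 'EG': 'EcalCalZElectron'}    # made-up entries
seqList = ['@Mu', 'SiStripCalMinBias', '@EG']

level, maxLevel = 0, 20
while '@' in repr(seqList) and level < maxLevel:
    level += 1
    for item in seqList:
        if item.startswith('@'):
            seqList.remove(item)
            seqList.extend(mapping[item[1:]].split('+'))
            break

print seqList    # ['SiStripCalMinBias', 'TkAlMuonIsolated', 'TkAlZMuMu', 'EcalCalZElectron']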
1963  def prepare_DQM(self, sequence = 'DQMOffline'):
1964  # this one needs replacement
1965 
1966  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1967  sequenceList=sequence.split('.')[-1].split('+')
1968  from DQMOffline.Configuration.autoDQM import autoDQM
1969  self.expandMapping(sequenceList,autoDQM,index=0)
1970 
1971  if len(set(sequenceList))!=len(sequenceList):
1972  sequenceList=list(set(sequenceList))
1973  print "Duplicate entries for DQM, using",sequenceList
1974  pathName='dqmoffline_step'
1975 
1976  for (i,sequence) in enumerate(sequenceList):
1977  if (i!=0):
1978  pathName='dqmoffline_%d_step'%(i)
1979 
1980  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1981  self.renameHLTprocessInSequence(sequence)
1982 
1983  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1984  if 'HLT' in self.stepMap.keys():
1985  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1986  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1987  else:
1988  # schedule DQM as a standard Path
1989  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1990  self.schedule.append(getattr(self.process,pathName))
1991 
1992 
1993  def prepare_HARVESTING(self, sequence = None):
1994  """ Enrich the process with harvesting step """
1995  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1996  self.loadAndRemember(self.DQMSaverCFF)
1997 
1998  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1999  sequence = sequence.split('.')[-1]
2000 
2001  # decide which HARVESTING paths to use
2002  harvestingList = sequence.split("+")
2003  from DQMOffline.Configuration.autoDQM import autoDQM
2004  from Validation.Configuration.autoValidation import autoValidation
2005  import copy
2006  combined_mapping = copy.deepcopy( autoDQM )
2007  combined_mapping.update( autoValidation )
2008  self.expandMapping(harvestingList,combined_mapping,index=-1)
2009 
2010  if len(set(harvestingList))!=len(harvestingList):
2011  harvestingList=list(set(harvestingList))
2012  print "Duplicate entries for HARVESTING, using",harvestingList
2013 
2014  for name in harvestingList:
2015  if not name in harvestingConfig.__dict__:
2016  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
2017  continue
2018  harvestingstream = getattr(harvestingConfig,name)
2019  if isinstance(harvestingstream,cms.Path):
2020  self.schedule.append(harvestingstream)
2021  self.blacklist_paths.append(harvestingstream)
2022  if isinstance(harvestingstream,cms.Sequence):
2023  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2024  self.schedule.append(getattr(self.process,name+"_step"))
2025 
2026  self.scheduleSequence('DQMSaver','dqmsave_step')
2027  return
2028 
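The merge of autoDQM and autoValidation above yields a single dictionary whose values are lists; index=-1 in expandMapping then picks the last element of each list, i.e. the harvesting sequence. A sketch with invented entries:

import copy

autoDQM_like = {'common': ['DQMOfflineCommon', 'DQMHarvestCommon']}                                   # invented
autoValidation_like = {'baseValidation': ['prevalidation', 'validation', 'postValidation_common']}    # invented
combined_mapping = copy.deepcopy(autoDQM_like)
combined_mapping.update(autoValidation_like)

for key, seqs in combined_mapping.items():
    print key, '->', seqs[-1]    # the harvesting sequence is the last list entry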
2029  def prepare_ALCAHARVEST(self, sequence = None):
2030  """ Enrich the process with AlCaHarvesting step """
2031  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2032  sequence=sequence.split(".")[-1]
2033 
2034  # decide which AlcaHARVESTING paths to use
2035  harvestingList = sequence.split("+")
2036 
2037 
2038 
2039  from Configuration.AlCa.autoPCL import autoPCL
2040  self.expandMapping(harvestingList,autoPCL)
2041 
2042  for name in harvestingConfig.__dict__:
2043  harvestingstream = getattr(harvestingConfig,name)
2044  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2045  self.schedule.append(harvestingstream)
2046  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2047  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2048  harvestingList.remove(name)
2049  # append the common part at the end of the sequence
2050  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2051  self.schedule.append(lastStep)
2052 
2053  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2054  print "The following harvesting could not be found : ", harvestingList
2055  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2056 
2057 
2058 
2059  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2060  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2061  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2062  return
2063 
2064  def finalizeFastSimHLT(self):
2065  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2066  self.schedule.append(self.process.reconstruction)
2067 
2068 
2069  def build_production_info(self, evt_type, evtnumber):
2070  """ Add useful info for the production. """
2071  self.process.configurationMetadata=cms.untracked.PSet\
2072  (version=cms.untracked.string("$Revision: 1.19 $"),
2073  name=cms.untracked.string("Applications"),
2074  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2075  )
2076 
2077  self.addedObjects.append(("Production Info","configurationMetadata"))
2078 
2079 
2080  def prepare(self, doChecking = False):
2081  """ Prepare the configuration string and add missing pieces."""
2082 
2083  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2084  self.addMaxEvents()
2085  if self.with_input:
2086  self.addSource()
2087  self.addStandardSequences()
2088  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2089  self.completeInputCommand()
2090  self.addConditions()
2091 
2092 
2093  outputModuleCfgCode=""
2094  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2095  outputModuleCfgCode=self.addOutput()
2096 
2097  self.addCommon()
2098 
2099  self.pythonCfgCode = "# Auto generated configuration file\n"
2100  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2101  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2102  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2103  if hasattr(self._options,"era") and self._options.era :
2104  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2105  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2106  # Multiple eras can be specified in a comma separated list
2107  for requestedEra in self._options.era.split(",") :
2108  self.pythonCfgCode += ",eras."+requestedEra
2109  self.pythonCfgCode += ")\n\n" # end of the line
2110  else :
2111  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2112 
2113  self.pythonCfgCode += "# import of standard configurations\n"
2114  for module in self.imports:
2115  self.pythonCfgCode += ("process.load('"+module+"')\n")
2116 
2117  # production info
2118  if not hasattr(self.process,"configurationMetadata"):
2119  self.build_production_info(self._options.evt_type, self._options.number)
2120  else:
2121  #the PSet was added via a load
2122  self.addedObjects.append(("Production Info","configurationMetadata"))
2123 
2124  self.pythonCfgCode +="\n"
2125  for comment,object in self.addedObjects:
2126  if comment!="":
2127  self.pythonCfgCode += "\n# "+comment+"\n"
2128  self.pythonCfgCode += dumpPython(self.process,object)
2129 
2130  # dump the output definition
2131  self.pythonCfgCode += "\n# Output definition\n"
2132  self.pythonCfgCode += outputModuleCfgCode
2133 
2134  # dump all additional outputs (e.g. alca or skim streams)
2135  self.pythonCfgCode += "\n# Additional output definition\n"
2136  #I do not understand why the keys are not normally ordered.
2137  nl=self.additionalOutputs.keys()
2138  nl.sort()
2139  for name in nl:
2140  output = self.additionalOutputs[name]
2141  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2142  tmpOut = cms.EndPath(output)
2143  setattr(self.process,name+'OutPath',tmpOut)
2144  self.schedule.append(tmpOut)
2145 
2146  # dump all additional commands
2147  self.pythonCfgCode += "\n# Other statements\n"
2148  for command in self.additionalCommands:
2149  self.pythonCfgCode += command + "\n"
2150 
2151  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2152  for object in self._options.inlineObjets.split(','):
2153  if not object:
2154  continue
2155  if not hasattr(self.process,object):
2156  print 'cannot inline -'+object+'- : not known'
2157  else:
2158  self.pythonCfgCode +='\n'
2159  self.pythonCfgCode +=dumpPython(self.process,object)
2160 
2161  # dump all paths
2162  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2163  for path in self.process.paths:
2164  if getattr(self.process,path) not in self.blacklist_paths:
2165  self.pythonCfgCode += dumpPython(self.process,path)
2166 
2167  for endpath in self.process.endpaths:
2168  if getattr(self.process,endpath) not in self.blacklist_paths:
2169  self.pythonCfgCode += dumpPython(self.process,endpath)
2170 
2171  # dump the schedule
2172  self.pythonCfgCode += "\n# Schedule definition\n"
2173  result = "process.schedule = cms.Schedule("
2174 
2175  # handling of the schedule
2176  self.process.schedule = cms.Schedule()
2177  for item in self.schedule:
2178  if not isinstance(item, cms.Schedule):
2179  self.process.schedule.append(item)
2180  else:
2181  self.process.schedule.extend(item)
2182 
2183  if hasattr(self.process,"HLTSchedule"):
2184  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2185  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2186  pathNames = ['process.'+p.label_() for p in beforeHLT]
2187  result += ','.join(pathNames)+')\n'
2188  result += 'process.schedule.extend(process.HLTSchedule)\n'
2189  pathNames = ['process.'+p.label_() for p in afterHLT]
2190  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2191  else:
2192  pathNames = ['process.'+p.label_() for p in self.schedule]
2193  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2194 
2195  self.pythonCfgCode += result
2196 
2197  if self._options.nThreads != "1":
2198  self.pythonCfgCode +="\n"
2199  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2200  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2201  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2202  #repacked version
2203  if self._options.isRepacked:
2204  self.pythonCfgCode +="\n"
2205  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2206  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2207  MassReplaceInputTag(self.process)
2208 
2209  # special treatment in case of production filter sequence 2/2
2210  if self.productionFilterSequence:
2211  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2212  self.pythonCfgCode +='for path in process.paths:\n'
2213  if len(self.conditionalPaths):
2214  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2215  if len(self.excludedPaths):
2216  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2217  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2218  pfs = getattr(self.process,self.productionFilterSequence)
2219  for path in self.process.paths:
2220  if not path in self.conditionalPaths: continue
2221  if path in self.excludedPaths: continue
2222  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2223 
2224 
2225  # dump customise fragment
2226  self.pythonCfgCode += self.addCustomise()
2227 
2228  if self._options.runUnscheduled:
2229  # prune and delete paths
2230  #this is not supporting the blacklist at this point since I do not understand it
2231  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2232  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2233  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2234 
2235  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2236  self.process=convertToUnscheduled(self.process)
2237 
2238  #now add the unscheduled stuff
2239  for module in self.importsUnsch:
2240  self.process.load(module)
2241  self.pythonCfgCode += ("process.load('"+module+"')\n")
2242 
2243  #and clean the unscheduled stuff
2244  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2245  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2246 
2247  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2248  self.process=cleanUnscheduled(self.process)
2249 
2250 
2251  self.pythonCfgCode += self.addCustomise(1)
2252 
2253 
2254  # make the .io file
2255 
2256  if self._options.io:
2257  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2258  if not self._options.io.endswith('.io'): self._options.io+='.io'
2259  io=open(self._options.io,'w')
2260  ioJson={}
2261  if hasattr(self.process.source,"fileNames"):
2262  if len(self.process.source.fileNames.value()):
2263  ioJson['primary']=self.process.source.fileNames.value()
2264  if hasattr(self.process.source,"secondaryFileNames"):
2265  if len(self.process.source.secondaryFileNames.value()):
2266  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2267  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2268  ioJson['pileup']=self._options.pileup_input[4:]
2269  for (o,om) in self.process.outputModules_().items():
2270  ioJson[o]=om.fileName.value()
2271  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2272  if self.productionFilterSequence:
2273  ioJson['filter']=self.productionFilterSequence
2274  import json
2275  io.write(json.dumps(ioJson))
2276  return
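For orientation, the .io file written here is plain JSON; with invented inputs and a single output module it could look like the following (field names taken from the code above, all values made up):

import json

ioJson = {
    'primary': ['/store/mc/FakeEra/MinBias/GEN-SIM/v1/00000/fakefile.root'],   # made-up LFN
    'pileup': '/MinBias/FakeEra-v1/GEN-SIM',                                   # made-up dataset
    'RAWSIMoutput': 'output.root',                                             # hypothetical output module
    'GT': 'FAKE_GLOBALTAG::All',                                               # made-up global tag
}
print json.dumps(ioJson)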
2277 