CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 class Options:
19  pass
20 
21 # the canonical defaults
22 defaultOptions = Options()
23 defaultOptions.datamix = 'DataOnSim'
24 defaultOptions.isMC=False
25 defaultOptions.isData=True
26 defaultOptions.step=''
27 defaultOptions.pileup='NoPileUp'
28 defaultOptions.pileup_input = None
29 defaultOptions.pileup_dasoption = ''
30 defaultOptions.geometry = 'SimDB'
31 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
32 defaultOptions.magField = ''
33 defaultOptions.conditions = None
34 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
35 defaultOptions.harvesting= 'AtRunEnd'
36 defaultOptions.gflash = False
37 defaultOptions.number = -1
38 defaultOptions.number_out = None
39 defaultOptions.arguments = ""
40 defaultOptions.name = "NO NAME GIVEN"
41 defaultOptions.evt_type = ""
42 defaultOptions.filein = ""
43 defaultOptions.dasquery=""
44 defaultOptions.dasoption=""
45 defaultOptions.secondfilein = ""
46 defaultOptions.customisation_file = []
47 defaultOptions.customisation_file_unsch = []
48 defaultOptions.customise_commands = ""
49 defaultOptions.inline_custom=False
50 defaultOptions.particleTable = 'pythiapdt'
51 defaultOptions.particleTableList = ['pythiapdt','pdt']
52 defaultOptions.dirin = ''
53 defaultOptions.dirout = ''
54 defaultOptions.filetype = 'EDM'
55 defaultOptions.fileout = 'output.root'
56 defaultOptions.filtername = ''
57 defaultOptions.lazy_download = False
58 defaultOptions.custom_conditions = ''
59 defaultOptions.hltProcess = ''
60 defaultOptions.eventcontent = None
61 defaultOptions.datatier = None
62 defaultOptions.inlineEventContent = True
63 defaultOptions.inlineObjets =''
64 defaultOptions.hideGen=False
65 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
66 defaultOptions.beamspot=None
67 defaultOptions.outputDefinition =''
68 defaultOptions.inputCommands = None
69 defaultOptions.outputCommands = None
70 defaultOptions.inputEventContent = ''
71 defaultOptions.dropDescendant = False
72 defaultOptions.relval = None
73 defaultOptions.profile = None
74 defaultOptions.isRepacked = False
75 defaultOptions.restoreRNDSeeds = False
76 defaultOptions.donotDropOnInput = ''
77 defaultOptions.python_filename =''
78 defaultOptions.io=None
79 defaultOptions.lumiToProcess=None
80 defaultOptions.fast=False
81 defaultOptions.runsAndWeightsForMC = None
82 defaultOptions.runsScenarioForMC = None
83 defaultOptions.runUnscheduled = False
84 defaultOptions.timeoutOutput = False
85 defaultOptions.nThreads = '1'
86 
87 # some helper routines
88 def dumpPython(process,name):
89  theObject = getattr(process,name)
90  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
91  return "process."+name+" = " + theObject.dumpPython("process")
92  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
93  return "process."+name+" = " + theObject.dumpPython()+"\n"
94  else:
95  return "process."+name+" = " + theObject.dumpPython()+"\n"
96 def filesFromList(fileName,s=None):
97  import os
98  import FWCore.ParameterSet.Config as cms
99  prim=[]
100  sec=[]
101  for line in open(fileName,'r'):
102  if line.count(".root")>=2:
103  #two files solution...
104  entries=line.replace("\n","").split()
105  if not entries[0] in prim:
106  prim.append(entries[0])
107  if not entries[1] in sec:
108  sec.append(entries[1])
109  elif (line.find(".root")!=-1):
110  entry=line.replace("\n","")
111  if not entry in prim:
112  prim.append(entry)
113  if s:
114  if not hasattr(s,"fileNames"):
115  s.fileNames=cms.untracked.vstring(prim)
116  else:
117  s.fileNames.extend(prim)
118  if len(sec)!=0:
119  if not hasattr(s,"secondaryFileNames"):
120  s.secondaryFileNames=cms.untracked.vstring(sec)
121  else:
122  s.secondaryFileNames.extend(sec)
123  print "found files: ",prim
124  if len(prim)==0:
125  raise Exception("There are not files in input from the file list")
126  if len(sec)!=0:
127  print "found parent files:",sec
128  return (prim,sec)
129 
130 def filesFromDASQuery(query,option="",s=None):
131  import os,time
132  import FWCore.ParameterSet.Config as cms
133  prim=[]
134  sec=[]
135  print "the query is",query
136  eC=5
137  count=0
138  while eC!=0 and count<3:
139  if count!=0:
140  print 'Sleeping, then retrying DAS'
141  time.sleep(100)
142  p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
143  pipe=p.stdout.read()
144  tupleP = os.waitpid(p.pid, 0)
145  eC=tupleP[1]
146  count=count+1
147  if eC==0:
148  print "DAS succeeded after",count,"attempts",eC
149  else:
150  print "DAS failed 3 times- I give up"
151  for line in pipe.split('\n'):
152  if line.count(".root")>=2:
153  #two files solution...
154  entries=line.replace("\n","").split()
155  if not entries[0] in prim:
156  prim.append(entries[0])
157  if not entries[1] in sec:
158  sec.append(entries[1])
159  elif (line.find(".root")!=-1):
160  entry=line.replace("\n","")
161  if not entry in prim:
162  prim.append(entry)
163  if s:
164  if not hasattr(s,"fileNames"):
165  s.fileNames=cms.untracked.vstring(prim)
166  else:
167  s.fileNames.extend(prim)
168  if len(sec)!=0:
169  if not hasattr(s,"secondaryFileNames"):
170  s.secondaryFileNames=cms.untracked.vstring(sec)
171  else:
172  s.secondaryFileNames.extend(sec)
173  print "found files: ",prim
174  if len(sec)!=0:
175  print "found parent files:",sec
176  return (prim,sec)
177 
178 def anyOf(listOfKeys,dict,opt=None):
179  for k in listOfKeys:
180  if k in dict:
181  toReturn=dict[k]
182  dict.pop(k)
183  return toReturn
184  if opt!=None:
185  return opt
186  else:
187  raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
188 
190  """The main building routines """
191 
192  def __init__(self, options, process = None, with_output = False, with_input = False ):
193  """options taken from old cmsDriver and optparse """
194 
195  options.outfile_name = options.dirout+options.fileout
196 
197  self._options = options
198 
199  if self._options.isData and options.isMC:
200  raise Exception("ERROR: You may specify only --data or --mc, not both")
201  #if not self._options.conditions:
202  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
203 
204  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
205  if 'ENDJOB' in self._options.step:
206  if (hasattr(self._options,"outputDefinition") and \
207  self._options.outputDefinition != '' and \
208  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
209  (hasattr(self._options,"datatier") and \
210  self._options.datatier and \
211  'DQMIO' in self._options.datatier):
212  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
213  self._options.step=self._options.step.replace(',ENDJOB','')
214 
215 
216 
217  # what steps are provided by this class?
218  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
219  self.stepMap={}
220  self.stepKeys=[]
221  for step in self._options.step.split(","):
222  if step=='': continue
223  stepParts = step.split(":")
224  stepName = stepParts[0]
225  if stepName not in stepList and not stepName.startswith('re'):
226  raise ValueError("Step "+stepName+" unknown")
227  if len(stepParts)==1:
228  self.stepMap[stepName]=""
229  elif len(stepParts)==2:
230  self.stepMap[stepName]=stepParts[1].split('+')
231  elif len(stepParts)==3:
232  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
233  else:
234  raise ValueError("Step definition "+step+" invalid")
235  self.stepKeys.append(stepName)
236 
237  #print "map of steps is:",self.stepMap
238 
239  self.with_output = with_output
240  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
241  self.with_output = False
242  self.with_input = with_input
243  if process == None:
244  self.process = cms.Process(self._options.name)
245  else:
246  self.process = process
247  self.imports = []
248  self.define_Configs()
249  self.schedule = list()
250 
251  # we are doing three things here:
252  # creating a process to catch errors
253  # building the code to re-create the process
254 
255  self.additionalCommands = []
256  # TODO: maybe a list of to be dumped objects would help as well
257  self.blacklist_paths = []
258  self.addedObjects = []
259  self.additionalOutputs = {}
260 
261  self.productionFilterSequence = None
262  self.labelsToAssociate=[]
263  self.nextScheduleIsConditional=False
264  self.conditionalPaths=[]
265  self.excludedPaths=[]
266 
267  def profileOptions(self):
268  """
269  addIgProfService
270  Function to add the igprof profile service so that you can dump in the middle
271  of the run.
272  """
273  profileOpts = self._options.profile.split(':')
274  profilerStart = 1
275  profilerInterval = 100
276  profilerFormat = None
277  profilerJobFormat = None
278 
279  if len(profileOpts):
280  #type, given as first argument is unused here
281  profileOpts.pop(0)
282  if len(profileOpts):
283  startEvent = profileOpts.pop(0)
284  if not startEvent.isdigit():
285  raise Exception("%s is not a number" % startEvent)
286  profilerStart = int(startEvent)
287  if len(profileOpts):
288  eventInterval = profileOpts.pop(0)
289  if not eventInterval.isdigit():
290  raise Exception("%s is not a number" % eventInterval)
291  profilerInterval = int(eventInterval)
292  if len(profileOpts):
293  profilerFormat = profileOpts.pop(0)
294 
295 
296  if not profilerFormat:
297  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
298  self._options.step,
299  self._options.pileup,
300  self._options.conditions,
301  self._options.datatier,
302  self._options.profileTypeLabel)
303  if not profilerJobFormat and profilerFormat.endswith(".gz"):
304  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
305  elif not profilerJobFormat:
306  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
307 
308  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
309 
310  def load(self,includeFile):
311  includeFile = includeFile.replace('/','.')
312  self.process.load(includeFile)
313  return sys.modules[includeFile]
314 
315  def loadAndRemember(self, includeFile):
316  """helper routine to load am memorize imports"""
317  # we could make the imports a on-the-fly data method of the process instance itself
318  # not sure if the latter is a good idea
319  includeFile = includeFile.replace('/','.')
320  self.imports.append(includeFile)
321  self.process.load(includeFile)
322  return sys.modules[includeFile]
323 
324  def executeAndRemember(self, command):
325  """helper routine to remember replace statements"""
326  self.additionalCommands.append(command)
327  if not command.strip().startswith("#"):
328  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
329  import re
330  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
331  #exec(command.replace("process.","self.process."))
332 
333  def addCommon(self):
334  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
335  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
336  else:
337  self.process.options = cms.untracked.PSet( )
338 
339  self.addedObjects.append(("","options"))
340 
341  if self._options.lazy_download:
342  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
343  stats = cms.untracked.bool(True),
344  enable = cms.untracked.bool(True),
345  cacheHint = cms.untracked.string("lazy-download"),
346  readHint = cms.untracked.string("read-ahead-buffered")
347  )
348  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
349 
350  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
351  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
352 
353  if self._options.profile:
354  (start, interval, eventFormat, jobFormat)=self.profileOptions()
355  self.process.IgProfService = cms.Service("IgProfService",
356  reportFirstEvent = cms.untracked.int32(start),
357  reportEventInterval = cms.untracked.int32(interval),
358  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
359  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
360  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
361 
362  def addMaxEvents(self):
363  """Here we decide how many evts will be processed"""
364  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
365  if self._options.number_out:
366  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
367  self.addedObjects.append(("","maxEvents"))
368 
369  def addSource(self):
370  """Here the source is built. Priority: file, generator"""
371  self.addedObjects.append(("Input source","source"))
372 
373  def filesFromOption(self):
374  for entry in self._options.filein.split(','):
375  print "entry",entry
376  if entry.startswith("filelist:"):
377  filesFromList(entry[9:],self.process.source)
378  elif entry.startswith("dbs:") or entry.startswith("das:"):
379  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
380  else:
381  self.process.source.fileNames.append(self._options.dirin+entry)
382  if self._options.secondfilein:
383  if not hasattr(self.process.source,"secondaryFileNames"):
384  raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
385  for entry in self._options.secondfilein.split(','):
386  print "entry",entry
387  if entry.startswith("filelist:"):
388  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
389  elif entry.startswith("dbs:") or entry.startswith("das:"):
390  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
391  else:
392  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
393 
394  if self._options.filein or self._options.dasquery:
395  if self._options.filetype == "EDM":
396  self.process.source=cms.Source("PoolSource",
397  fileNames = cms.untracked.vstring(),
398  secondaryFileNames= cms.untracked.vstring())
399  filesFromOption(self)
400  elif self._options.filetype == "DAT":
401  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
402  filesFromOption(self)
403  elif self._options.filetype == "LHE":
404  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
405  if self._options.filein.startswith("lhe:"):
406  #list the article directory automatically
407  args=self._options.filein.split(':')
408  article=args[1]
409  print 'LHE input from article ',article
410  location='/store/lhe/'
411  import os
412  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
413  for line in textOfFiles:
414  for fileName in [x for x in line.split() if '.lhe' in x]:
415  self.process.source.fileNames.append(location+article+'/'+fileName)
416  #check first if list of LHE files is loaded (not empty)
417  if len(line)<2:
418  print 'Issue to load LHE files, please check and try again.'
419  sys.exit(-1)
420  #Additional check to protect empty fileNames in process.source
421  if len(self.process.source.fileNames)==0:
422  print 'Issue with empty filename, but can pass line check'
423  sys.exit(-1)
424  if len(args)>2:
425  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
426  else:
427  filesFromOption(self)
428 
429  elif self._options.filetype == "DQM":
430  self.process.source=cms.Source("DQMRootSource",
431  fileNames = cms.untracked.vstring())
432  filesFromOption(self)
433 
434  elif self._options.filetype == "DQMDAQ":
435  # FIXME: how to configure it if there are no input files specified?
436  self.process.source=cms.Source("DQMStreamerReader")
437 
438 
439  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
440  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
441 
442  if self._options.dasquery!='':
443  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
444  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
445 
446  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
447  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
448 
449  ##drop LHEXMLStringProduct on input to save memory if appropriate
450  if 'GEN' in self.stepMap.keys():
451  if self._options.inputCommands:
452  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
453  else:
454  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
455 
456  if self.process.source and self._options.inputCommands:
457  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
458  for command in self._options.inputCommands.split(','):
459  # remove whitespace around the keep/drop statements
460  command = command.strip()
461  if command=='': continue
462  self.process.source.inputCommands.append(command)
463  if not self._options.dropDescendant:
464  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
465 
466  if self._options.lumiToProcess:
467  import FWCore.PythonUtilities.LumiList as LumiList
468  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
469 
470  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
471  if self.process.source is None:
472  self.process.source=cms.Source("EmptySource")
473 
474  # modify source in case of run-dependent MC
475  self.runsAndWeights=None
476  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
477  if not self._options.isMC :
478  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
479  if self._options.runsAndWeightsForMC:
480  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
481  else:
482  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
483  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
484  __import__(RunsAndWeights[self._options.runsScenarioForMC])
485  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
486  else:
487  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
488 
489  if self.runsAndWeights:
490  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
491  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
492  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
493  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
494 
495  return
496 
497  def addOutput(self):
498  """ Add output module to the process """
499  result=""
500  if self._options.outputDefinition:
501  if self._options.datatier:
502  print "--datatier & --eventcontent options ignored"
503 
504  #new output convention with a list of dict
505  outList = eval(self._options.outputDefinition)
506  for (id,outDefDict) in enumerate(outList):
507  outDefDictStr=outDefDict.__str__()
508  if not isinstance(outDefDict,dict):
509  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
510  #requires option: tier
511  theTier=anyOf(['t','tier','dataTier'],outDefDict)
512  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
513  ## event content
514  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
515  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
516  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
517  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
518  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
519  # module label has a particular role
520  if not theModuleLabel:
521  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
522  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
523  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
524  ]
525  for name in tryNames:
526  if not hasattr(self.process,name):
527  theModuleLabel=name
528  break
529  if not theModuleLabel:
530  raise Exception("cannot find a module label for specification: "+outDefDictStr)
531  if id==0:
532  defaultFileName=self._options.outfile_name
533  else:
534  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
535 
536  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
537  if not theFileName.endswith('.root'):
538  theFileName+='.root'
539 
540  if len(outDefDict.keys()):
541  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
542  if theStreamType=='DQMIO': theStreamType='DQM'
543  if theStreamType=='ALL':
544  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
545  else:
546  theEventContent = getattr(self.process, theStreamType+"EventContent")
547 
548 
549  addAlCaSelects=False
550  if theStreamType=='ALCARECO' and not theFilterName:
551  theFilterName='StreamALCACombined'
552  addAlCaSelects=True
553 
554  CppType='PoolOutputModule'
555  if self._options.timeoutOutput:
556  CppType='TimeoutPoolOutputModule'
557  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
558  output = cms.OutputModule(CppType,
559  theEventContent.clone(),
560  fileName = cms.untracked.string(theFileName),
561  dataset = cms.untracked.PSet(
562  dataTier = cms.untracked.string(theTier),
563  filterName = cms.untracked.string(theFilterName))
564  )
565  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
566  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
567  if not theSelectEvent and hasattr(self.process,'filtering_step'):
568  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
569  if theSelectEvent:
570  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
571 
572  if addAlCaSelects:
573  if not hasattr(output,'SelectEvents'):
574  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
575  for alca in self.AlCaPaths:
576  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
577 
578 
579  if hasattr(self.process,theModuleLabel):
580  raise Exception("the current process already has a module "+theModuleLabel+" defined")
581  #print "creating output module ",theModuleLabel
582  setattr(self.process,theModuleLabel,output)
583  outputModule=getattr(self.process,theModuleLabel)
584  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
585  path=getattr(self.process,theModuleLabel+'_step')
586  self.schedule.append(path)
587 
588  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
589  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
590  return label
591  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
592  if theExtraOutputCommands:
593  if not isinstance(theExtraOutputCommands,list):
594  raise Exception("extra ouput command in --option must be a list of strings")
595  if hasattr(self.process,theStreamType+"EventContent"):
596  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
597  else:
598  outputModule.outputCommands.extend(theExtraOutputCommands)
599 
600  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
601 
602  ##ends the --output options model
603  return result
604 
605  streamTypes=self._options.eventcontent.split(',')
606  tiers=self._options.datatier.split(',')
607  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
608  raise Exception("number of event content arguments does not match number of datatier arguments")
609 
610  # if the only step is alca we don't need to put in an output
611  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
612  return "\n"
613 
614  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
615  if streamType=='': continue
616  if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
617  if streamType=='DQMIO': streamType='DQM'
618  theEventContent = getattr(self.process, streamType+"EventContent")
619  if i==0:
620  theFileName=self._options.outfile_name
621  theFilterName=self._options.filtername
622  else:
623  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
624  theFilterName=self._options.filtername
625  CppType='PoolOutputModule'
626  if self._options.timeoutOutput:
627  CppType='TimeoutPoolOutputModule'
628  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
629  output = cms.OutputModule(CppType,
630  theEventContent,
631  fileName = cms.untracked.string(theFileName),
632  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
633  filterName = cms.untracked.string(theFilterName)
634  )
635  )
636  if hasattr(self.process,"generation_step") and streamType!='LHE':
637  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
638  if hasattr(self.process,"filtering_step"):
639  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
640 
641  if streamType=='ALCARECO':
642  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
643 
644  if "MINIAOD" in streamType:
645  output.dropMetaData = cms.untracked.string('ALL')
646  output.fastCloning= cms.untracked.bool(False)
647  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
648 
649  outputModuleName=streamType+'output'
650  setattr(self.process,outputModuleName,output)
651  outputModule=getattr(self.process,outputModuleName)
652  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
653  path=getattr(self.process,outputModuleName+'_step')
654  self.schedule.append(path)
655 
656  if self._options.outputCommands and streamType!='DQM':
657  for evct in self._options.outputCommands.split(','):
658  if not evct: continue
659  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
660 
661  if not self._options.inlineEventContent:
662  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
663  return label
664  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
665 
666  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
667 
668  return result
669 
671  """
672  Add selected standard sequences to the process
673  """
674  # load the pile up file
675  if self._options.pileup:
676  pileupSpec=self._options.pileup.split(',')[0]
677 
678  # Does the requested pile-up scenario exist?
679  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
680  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
681  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
682  raise Exception(message)
683 
684  # Put mixing parameters in a dictionary
685  if '.' in pileupSpec:
686  mixingDict={'file':pileupSpec}
687  elif pileupSpec.startswith('file:'):
688  mixingDict={'file':pileupSpec[5:]}
689  else:
690  import copy
691  mixingDict=copy.copy(Mixing[pileupSpec])
692  if len(self._options.pileup.split(','))>1:
693  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
694 
695  # Load the pu cfg file corresponding to the requested pu scenario
696  if 'file:' in pileupSpec:
697  #the file is local
698  self.process.load(mixingDict['file'])
699  print "inlining mixing module configuration"
700  self._options.inlineObjets+=',mix'
701  else:
702  self.loadAndRemember(mixingDict['file'])
703 
704  mixingDict.pop('file')
705  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
706  if self._options.pileup_input:
707  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
708  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
709  elif self._options.pileup_input.startswith("filelist:"):
710  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
711  else:
712  mixingDict['F']=self._options.pileup_input.split(',')
713  specialization=defineMixing(mixingDict)
714  for command in specialization:
715  self.executeAndRemember(command)
716  if len(mixingDict)!=0:
717  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
718 
719 
720  # load the geometry file
721  try:
722  if len(self.stepMap):
723  self.loadAndRemember(self.GeometryCFF)
724  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
725  self.loadAndRemember(self.SimGeometryCFF)
726  if self.geometryDBLabel:
727  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
728  except ImportError:
729  print "Geometry option",self._options.geometry,"unknown."
730  raise
731 
732  if len(self.stepMap):
733  self.loadAndRemember(self.magFieldCFF)
734 
735  for stepName in self.stepKeys:
736  stepSpec = self.stepMap[stepName]
737  print "Step:", stepName,"Spec:",stepSpec
738  if stepName.startswith('re'):
739  ##add the corresponding input content
740  if stepName[2:] not in self._options.donotDropOnInput:
741  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
742  stepName=stepName[2:]
743  if stepSpec=="":
744  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
745  elif type(stepSpec)==list:
746  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
747  elif type(stepSpec)==tuple:
748  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
749  else:
750  raise ValueError("Invalid step definition")
751 
752  if self._options.restoreRNDSeeds!=False:
753  #it is either True, or a process name
754  if self._options.restoreRNDSeeds==True:
755  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
756  else:
757  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
758  if self._options.inputEventContent or self._options.inputCommands:
759  if self._options.inputCommands:
760  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
761  else:
762  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
763 
764 
766  if self._options.inputEventContent:
767  import copy
768  def dropSecondDropStar(iec):
769  #drop occurence of 'drop *' in the list
770  count=0
771  for item in iec:
772  if item=='drop *':
773  if count!=0:
774  iec.remove(item)
775  count+=1
776 
777 
778  ## allow comma separated input eventcontent
779  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
780  for evct in self._options.inputEventContent.split(','):
781  if evct=='': continue
782  theEventContent = getattr(self.process, evct+"EventContent")
783  if hasattr(theEventContent,'outputCommands'):
784  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
785  if hasattr(theEventContent,'inputCommands'):
786  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
787 
788  dropSecondDropStar(self.process.source.inputCommands)
789 
790  if not self._options.dropDescendant:
791  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
792 
793 
794  return
795 
796  def addConditions(self):
797  """Add conditions to the process"""
798  if not self._options.conditions: return
799 
800  if 'FrontierConditions_GlobalTag' in self._options.conditions:
801  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
802  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
803 
804  self.loadAndRemember(self.ConditionsDefaultCFF)
805  from Configuration.AlCa.GlobalTag import GlobalTag
806  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
807  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
808  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
809 
810 
811  def addCustomise(self,unsch=0):
812  """Include the customise code """
813 
814  custOpt=[]
815  if unsch==0:
816  for c in self._options.customisation_file:
817  custOpt.extend(c.split(","))
818  else:
819  for c in self._options.customisation_file_unsch:
820  custOpt.extend(c.split(","))
821 
822  custMap=DictTypes.SortedKeysDict()
823  for opt in custOpt:
824  if opt=='': continue
825  if opt.count('.')>1:
826  raise Exception("more than . in the specification:"+opt)
827  fileName=opt.split('.')[0]
828  if opt.count('.')==0: rest='customise'
829  else:
830  rest=opt.split('.')[1]
831  if rest=='py': rest='customise' #catch the case of --customise file.py
832 
833  if fileName in custMap:
834  custMap[fileName].extend(rest.split('+'))
835  else:
836  custMap[fileName]=rest.split('+')
837 
838  if len(custMap)==0:
839  final_snippet='\n'
840  else:
841  final_snippet='\n# customisation of the process.\n'
842 
843  allFcn=[]
844  for opt in custMap:
845  allFcn.extend(custMap[opt])
846  for fcn in allFcn:
847  if allFcn.count(fcn)!=1:
848  raise Exception("cannot specify twice "+fcn+" as a customisation method")
849 
850  for f in custMap:
851  # let python search for that package and do syntax checking at the same time
852  packageName = f.replace(".py","").replace("/",".")
853  __import__(packageName)
854  package = sys.modules[packageName]
855 
856  # now ask the package for its definition and pick .py instead of .pyc
857  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
858 
859  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
860  if self._options.inline_custom:
861  for line in file(customiseFile,'r'):
862  if "import FWCore.ParameterSet.Config" in line:
863  continue
864  final_snippet += line
865  else:
866  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
867  for fcn in custMap[f]:
868  print "customising the process with",fcn,"from",f
869  if not hasattr(package,fcn):
870  #bound to fail at run time
871  raise Exception("config "+f+" has no function "+fcn)
872  #execute the command
873  self.process=getattr(package,fcn)(self.process)
874  #and print it in the configuration
875  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
876  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
877 
878  if len(custMap)!=0:
879  final_snippet += '\n# End of customisation functions\n'
880 
881  ### now for a useful command
882  return final_snippet
883 
885  final_snippet='\n# Customisation from command line\n'
886  if self._options.customise_commands:
887  import string
888  for com in self._options.customise_commands.split('\\n'):
889  com=string.lstrip(com)
890  self.executeAndRemember(com)
891  final_snippet +='\n'+com
892 
893  return final_snippet
894 
895  #----------------------------------------------------------------------------
896  # here the methods to define the python includes for each step or
897  # conditions
898  #----------------------------------------------------------------------------
899  def define_Configs(self):
900  if len(self.stepMap):
901  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
902  if self._options.particleTable not in defaultOptions.particleTableList:
903  print 'Invalid particle table provided. Options are:'
904  print defaultOptions.particleTable
905  sys.exit(-1)
906  else:
907  if len(self.stepMap):
908  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
909 
910  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
911 
912  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
913  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
914  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
915  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
916  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
917  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
918  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
919  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
920  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
921  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
922  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
923  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
924  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
925  self.EIDefaultCFF=None
926  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
927  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
928  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
929  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
930  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
931  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
932  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
933  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
934  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
935  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
936  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
937 
938  if "DATAMIX" in self.stepMap.keys():
939  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
940  if self._options.datamix == 'PreMix':
941  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
942  else:
943  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
944  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
945  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
946 
947  if "DIGIPREMIX" in self.stepMap.keys():
948  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
949  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawPreMixing_cff"
950  self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"
951 
952  self.ALCADefaultSeq=None
953  self.LHEDefaultSeq='externalLHEProducer'
954  self.GENDefaultSeq='pgen'
955  self.SIMDefaultSeq='psim'
956  self.DIGIDefaultSeq='pdigi'
957  self.DIGIPREMIXDefaultSeq='pdigi'
958  self.DIGIPREMIX_S2DefaultSeq='pdigi'
959  self.DATAMIXDefaultSeq=None
960  self.DIGI2RAWDefaultSeq='DigiToRaw'
961  self.HLTDefaultSeq='GRun'
962  self.L1DefaultSeq=None
963  self.L1REPACKDefaultSeq='GT'
964  self.HARVESTINGDefaultSeq=None
965  self.ALCAHARVESTDefaultSeq=None
966  self.CFWRITERDefaultSeq=None
967  self.RAW2DIGIDefaultSeq='RawToDigi'
968  self.L1RecoDefaultSeq='L1Reco'
969  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
970  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
971  self.RECODefaultSeq='reconstruction'
972  else:
973  self.RECODefaultSeq='reconstruction_fromRECO'
974 
975  self.EIDefaultSeq='top'
976  self.POSTRECODefaultSeq=None
977  self.L1HwValDefaultSeq='L1HwVal'
978  self.DQMDefaultSeq='DQMOffline'
979  self.VALIDATIONDefaultSeq=''
980  self.ENDJOBDefaultSeq='endOfProcess'
981  self.REPACKDefaultSeq='DigiToRawRepack'
982  self.PATDefaultSeq='miniAOD'
983 
984  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
985 
986  if not self._options.beamspot:
987  self._options.beamspot=VtxSmearedDefaultKey
988 
989  # if its MC then change the raw2digi
990  if self._options.isMC==True:
991  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
992  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
993  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
994  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
995  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
996  else:
997  self._options.beamspot = None
998 
999  #patch for gen, due to backward incompatibility
1000  if 'reGEN' in self.stepMap:
1001  self.GENDefaultSeq='fixGenInfo'
1002 
1003  if self._options.scenario=='cosmics':
1004  self._options.pileup='Cosmics'
1005  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1006  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1007  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1008  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1009  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1010  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1011  if self._options.isMC==True:
1012  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1013  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1014  self.RECODefaultSeq='reconstructionCosmics'
1015  self.DQMDefaultSeq='DQMOfflineCosmics'
1016 
1017  if self._options.scenario=='HeavyIons':
1018  if not self._options.beamspot:
1019  self._options.beamspot=VtxSmearedHIDefaultKey
1020  self.HLTDefaultSeq = 'HIon'
1021  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1022  self.VALIDATIONDefaultSeq=''
1023  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1024  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1025  self.RECODefaultSeq='reconstructionHeavyIons'
1026  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1027  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1028  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1029  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1030  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1031  if self._options.isMC==True:
1032  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1033 
1034 
1035  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1036 
1037  self.USERDefaultSeq='user'
1038  self.USERDefaultCFF=None
1039 
1040  # the magnetic field
1041  if self._options.isData:
1042  if self._options.magField==defaultOptions.magField:
1043  print "magnetic field option forced to: AutoFromDBCurrent"
1044  self._options.magField='AutoFromDBCurrent'
1045  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1046  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1047 
1048  # the geometry
1049  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1050  self.geometryDBLabel=None
1051  simGeometry=''
1052  if self._options.fast:
1053  if 'start' in self._options.conditions.lower():
1054  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1055  else:
1056  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1057  else:
1058  def inGeometryKeys(opt):
1059  from Configuration.StandardSequences.GeometryConf import GeometryConf
1060  if opt in GeometryConf:
1061  return GeometryConf[opt]
1062  else:
1063  return opt
1064 
1065  geoms=self._options.geometry.split(',')
1066  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1067  if len(geoms)==2:
1068  #may specify the reco geometry
1069  if '/' in geoms[1] or '_cff' in geoms[1]:
1070  self.GeometryCFF=geoms[1]
1071  else:
1072  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1073 
1074  if (geoms[0].startswith('DB:')):
1075  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1076  self.geometryDBLabel=geoms[0][3:]
1077  print "with DB:"
1078  else:
1079  if '/' in geoms[0] or '_cff' in geoms[0]:
1080  self.SimGeometryCFF=geoms[0]
1081  else:
1082  simGeometry=geoms[0]
1083  if self._options.gflash==True:
1084  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1085  else:
1086  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1087 
1088  # synchronize the geometry configuration and the FullSimulation sequence to be used
1089  if simGeometry not in defaultOptions.geometryExtendedOptions:
1090  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1091 
1092  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1093  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1094  self._options.beamspot='NoSmear'
1095 
1096  # fastsim requires some changes to the default cff files and sequences
1097  if self._options.fast:
1098  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1099  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1100  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1101  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1102  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1103 
1104  # Mixing
1105  if self._options.pileup=='default':
1106  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1107  self._options.pileup=MixingDefaultKey
1108 
1109 
1110  #not driven by a default cff anymore
1111  if self._options.isData:
1112  self._options.pileup=None
1113 
1114 
1115  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1116 
1117  # for alca, skims, etc
1118  def addExtraStream(self,name,stream,workflow='full'):
1119  # define output module and go from there
1120  output = cms.OutputModule("PoolOutputModule")
1121  if stream.selectEvents.parameters_().__len__()!=0:
1122  output.SelectEvents = stream.selectEvents
1123  else:
1124  output.SelectEvents = cms.untracked.PSet()
1125  output.SelectEvents.SelectEvents=cms.vstring()
1126  if isinstance(stream.paths,tuple):
1127  for path in stream.paths:
1128  output.SelectEvents.SelectEvents.append(path.label())
1129  else:
1130  output.SelectEvents.SelectEvents.append(stream.paths.label())
1131 
1132 
1133 
1134  if isinstance(stream.content,str):
1135  evtPset=getattr(self.process,stream.content)
1136  for p in evtPset.parameters_():
1137  setattr(output,p,getattr(evtPset,p))
1138  if not self._options.inlineEventContent:
1139  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1140  return label
1141  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1142  else:
1143  output.outputCommands = stream.content
1144 
1145 
1146  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1147 
1148  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1149  filterName = cms.untracked.string(stream.name))
1150 
1151  if self._options.filtername:
1152  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1153 
1154  #add an automatic flushing to limit memory consumption
1155  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1156 
1157  if workflow in ("producers,full"):
1158  if isinstance(stream.paths,tuple):
1159  for path in stream.paths:
1160  self.schedule.append(path)
1161  else:
1162  self.schedule.append(stream.paths)
1163 
1164 
1165  # in case of relvals we don't want to have additional outputs
1166  if (not self._options.relval) and workflow in ("full","output"):
1167  self.additionalOutputs[name] = output
1168  setattr(self.process,name,output)
1169 
1170  if workflow == 'output':
1171  # adjust the select events to the proper trigger results from previous process
1172  filterList = output.SelectEvents.SelectEvents
1173  for i, filter in enumerate(filterList):
1174  filterList[i] = filter+":"+self._options.triggerResultsProcess
1175 
1176  return output
1177 
1178  #----------------------------------------------------------------------------
1179  # here the methods to create the steps. Of course we are doing magic here ;)
1180  # prepare_STEPNAME modifies self.process and what else's needed.
1181  #----------------------------------------------------------------------------
1182 
1183  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1184  if ( len(sequence.split('.'))==1 ):
1185  l=self.loadAndRemember(defaultCFF)
1186  elif ( len(sequence.split('.'))==2 ):
1187  l=self.loadAndRemember(sequence.split('.')[0])
1188  sequence=sequence.split('.')[1]
1189  else:
1190  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1191  print sequence,"not recognized"
1192  raise
1193  return l
1194 
1195  def scheduleSequence(self,seq,prefix,what='Path'):
1196  if '*' in seq:
1197  #create only one path with all sequences in it
1198  for i,s in enumerate(seq.split('*')):
1199  if i==0:
1200  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1201  else:
1202  p=getattr(self.process,prefix)
1203  p+=getattr(self.process, s)
1204  self.schedule.append(getattr(self.process,prefix))
1205  return
1206  else:
1207  #create as many path as many sequences
1208  if not '+' in seq:
1209  if self.nextScheduleIsConditional:
1210  self.conditionalPaths.append(prefix)
1211  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1212  self.schedule.append(getattr(self.process,prefix))
1213  else:
1214  for i,s in enumerate(seq.split('+')):
1215  sn=prefix+'%d'%(i)
1216  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1217  self.schedule.append(getattr(self.process,sn))
1218  return
1219 
1220  def scheduleSequenceAtEnd(self,seq,prefix):
1221  self.scheduleSequence(seq,prefix,what='EndPath')
1222  return
1223 
1224  def prepare_ALCAPRODUCER(self, sequence = None):
1225  self.prepare_ALCA(sequence, workflow = "producers")
1226 
1227  def prepare_ALCAOUTPUT(self, sequence = None):
1228  self.prepare_ALCA(sequence, workflow = "output")
1229 
1230  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1231  """ Enrich the process with alca streams """
1232  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1233  sequence = sequence.split('.')[-1]
1234 
1235  # decide which ALCA paths to use
1236  alcaList = sequence.split("+")
1237  maxLevel=0
1238  from Configuration.AlCa.autoAlca import autoAlca
1239  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1240  self.expandMapping(alcaList,autoAlca)
1241  self.AlCaPaths=[]
1242  for name in alcaConfig.__dict__:
1243  alcastream = getattr(alcaConfig,name)
1244  shortName = name.replace('ALCARECOStream','')
1245  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1246  output = self.addExtraStream(name,alcastream, workflow = workflow)
1247  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1248  self.AlCaPaths.append(shortName)
1249  if 'DQM' in alcaList:
1250  if not self._options.inlineEventContent and hasattr(self.process,name):
1251  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1252  else:
1253  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1254 
1255  #rename the HLT process name in the alca modules
1256  if self._options.hltProcess or 'HLT' in self.stepMap:
1257  if isinstance(alcastream.paths,tuple):
1258  for path in alcastream.paths:
1259  self.renameHLTprocessInSequence(path.label())
1260  else:
1261  self.renameHLTprocessInSequence(alcastream.paths.label())
1262 
1263  for i in range(alcaList.count(shortName)):
1264  alcaList.remove(shortName)
1265 
1266  # DQM needs a special handling
1267  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1268  path = getattr(alcaConfig,name)
1269  self.schedule.append(path)
1270  alcaList.remove('DQM')
1271 
1272  if isinstance(alcastream,cms.Path):
1273  #black list the alca path so that they do not appear in the cfg
1274  self.blacklist_paths.append(alcastream)
1275 
1276 
1277  if len(alcaList) != 0:
1278  available=[]
1279  for name in alcaConfig.__dict__:
1280  alcastream = getattr(alcaConfig,name)
1281  if isinstance(alcastream,cms.FilteredStream):
1282  available.append(name.replace('ALCARECOStream',''))
1283  print "The following alcas could not be found "+str(alcaList)
1284  print "available ",available
1285  #print "verify your configuration, ignoring for now"
1286  raise Exception("The following alcas could not be found "+str(alcaList))
1287 
1288  def prepare_LHE(self, sequence = None):
1289  #load the fragment
1290  ##make it loadable
1291  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1292  print "Loading lhe fragment from",loadFragment
1293  __import__(loadFragment)
1294  self.process.load(loadFragment)
1295  ##inline the modules
1296  self._options.inlineObjets+=','+sequence
1297 
1298  getattr(self.process,sequence).nEvents = int(self._options.number)
1299 
1300  #schedule it
1301  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1302  self.excludedPaths.append("lhe_step")
1303  self.schedule.append( self.process.lhe_step )
1304 
1305  def prepare_GEN(self, sequence = None):
1306  """ load the fragment of generator configuration """
1307  loadFailure=False
1308  #remove trailing .py
1309  #support old style .cfi by changing into something.cfi into something_cfi
1310  #remove python/ from the name
1311  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1312  #standard location of fragments
1313  if not '/' in loadFragment:
1314  loadFragment='Configuration.Generator.'+loadFragment
1315  else:
1316  loadFragment=loadFragment.replace('/','.')
1317  try:
1318  print "Loading generator fragment from",loadFragment
1319  __import__(loadFragment)
1320  except:
1321  loadFailure=True
1322  #if self.process.source and self.process.source.type_()=='EmptySource':
1323  if not (self._options.filein or self._options.dasquery):
1324  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1325 
1326  if not loadFailure:
1327  generatorModule=sys.modules[loadFragment]
1328  genModules=generatorModule.__dict__
1329  #remove lhe producer module since this should have been
1330  #imported instead in the LHE step
1331  if self.LHEDefaultSeq in genModules:
1332  del genModules[self.LHEDefaultSeq]
1333 
1334  if self._options.hideGen:
1335  self.loadAndRemember(loadFragment)
1336  else:
1337  self.process.load(loadFragment)
1338  # expose the objects from that fragment to the configuration
1339  import FWCore.ParameterSet.Modules as cmstypes
1340  for name in genModules:
1341  theObject = getattr(generatorModule,name)
1342  if isinstance(theObject, cmstypes._Module):
1343  self._options.inlineObjets=name+','+self._options.inlineObjets
1344  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1345  self._options.inlineObjets+=','+name
1346 
1347  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1348  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1349  self.productionFilterSequence = 'ProductionFilterSequence'
1350  elif 'generator' in genModules:
1351  self.productionFilterSequence = 'generator'
1352 
1353  """ Enrich the schedule with the rest of the generation step """
1354  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1355  genSeqName=sequence.split('.')[-1]
1356 
1357  if True:
1358  try:
1359  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1360  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1361  self.loadAndRemember(cffToBeLoaded)
1362  except ImportError:
1363  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1364 
1365  if self._options.scenario == 'HeavyIons':
1366  if self._options.pileup=='HiMixGEN':
1367  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1368  else:
1369  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1370 
1371  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1372  self.schedule.append(self.process.generation_step)
1373 
1374  #register to the genstepfilter the name of the path (static right now, but might evolve)
1375  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1376 
1377  if 'reGEN' in self.stepMap:
1378  #stop here
1379  return
1380 
1381  """ Enrich the schedule with the summary of the filter step """
1382  #the gen filter in the endpath
1383  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1384  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1385  return
1386 
1387  def prepare_SIM(self, sequence = None):
1388  """ Enrich the schedule with the simulation step"""
1389  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1390  if not self._options.fast:
1391  if self._options.gflash==True:
1392  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1393 
1394  if self._options.magField=='0T':
1395  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1396  else:
1397  if self._options.magField=='0T':
1398  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1399 
1400  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1401  return
1402 
1403  def prepare_DIGI(self, sequence = None):
1404  """ Enrich the schedule with the digitisation step"""
1405  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1406 
1407  if self._options.gflash==True:
1408  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1409 
1410  if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1411  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1412 
1413  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1414  if self._options.inputEventContent=='':
1415  self._options.inputEventContent='REGEN'
1416  else:
1417  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1418 
1419 
1420  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1421  return
1422 
1423  def prepare_DIGIPREMIX(self, sequence = None):
1424  """ Enrich the schedule with the digitisation step"""
1425  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1426 
1427  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1428 
1429  if sequence == 'pdigi_valid':
1430  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1431  else:
1432  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1433 
1434  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1435  return
1436 
1437  def prepare_DIGIPREMIX_S2(self, sequence = None):
1438  """ Enrich the schedule with the digitisation step"""
1439  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1440 
1441  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1442 
1443 
1444  if sequence == 'pdigi_valid':
1445  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1446  else:
1447  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1448 
1449  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1450  return
1451 
1452  def prepare_CFWRITER(self, sequence = None):
1453  """ Enrich the schedule with the crossing frame writer step"""
1454  self.loadAndRemember(self.CFWRITERDefaultCFF)
1455  self.scheduleSequence('pcfw','cfwriter_step')
1456  return
1457 
1458  def prepare_DATAMIX(self, sequence = None):
1459  """ Enrich the schedule with the digitisation step"""
1460  self.loadAndRemember(self.DATAMIXDefaultCFF)
1461  self.scheduleSequence('pdatamix','datamixing_step')
1462 
1463  if self._options.pileup_input:
1464  theFiles=''
1465  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1466  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1467  elif self._options.pileup_input.startswith("filelist:"):
1468  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1469  else:
1470  theFiles=self._options.pileup_input.split(',')
1471  #print theFiles
1472  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1473 
1474  return
1475 
1476  def prepare_DIGI2RAW(self, sequence = None):
1477  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1478  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1479  if "DIGIPREMIX" in self.stepMap.keys():
1480  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1481  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1482 
1483  return
1484 
1485  def prepare_REPACK(self, sequence = None):
1486  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1487  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1488  return
1489 
1490  def prepare_L1(self, sequence = None):
1491  """ Enrich the schedule with the L1 simulation step"""
1492  assert(sequence == None)
1493  self.loadAndRemember(self.L1EMDefaultCFF)
1494  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1495  return
1496 
1497  def prepare_L1REPACK(self, sequence = None):
1498  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1499  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1500  if sequence in supported:
1501  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1502  if self._options.scenario == 'HeavyIons':
1503  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1504  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1505  else:
1506  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1507  raise Exception('unsupported feature')
1508 
1509 
1510  def prepare_HLT(self, sequence = None):
1511  """ Enrich the schedule with the HLT simulation step"""
1512  if not sequence:
1513  print "no specification of the hlt menu has been given, should never happen"
1514  raise Exception('no HLT sequence provided')
1515 
1516  if '@' in sequence:
1517  # case where HLT:@something was provided
1518  from Configuration.HLT.autoHLT import autoHLT
1519  key = sequence[1:]
1520  if key in autoHLT:
1521  sequence = autoHLT[key]
1522  else:
1523  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1524 
1525  if ',' in sequence:
1526  #case where HLT:something:something was provided
1527  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1528  optionsForHLT = {}
1529  if self._options.scenario == 'HeavyIons':
1530  optionsForHLT['type'] = 'HIon'
1531  else:
1532  optionsForHLT['type'] = 'GRun'
1533  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1534  if sequence == 'run,fromSource':
1535  if hasattr(self.process.source,'firstRun'):
1536  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1537  elif hasattr(self.process.source,'setRunNumber'):
1538  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1539  else:
1540  raise Exception('Cannot replace menu to load %s'%(sequence))
1541  else:
1542  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1543  else:
1544  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1545 
1546  if self._options.isMC:
1547  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1548 
1549  if self._options.name != 'HLT':
1550  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1551  self.additionalCommands.append('process = ProcessName(process)')
1552  self.additionalCommands.append('')
1553  from HLTrigger.Configuration.CustomConfigs import ProcessName
1554  self.process = ProcessName(self.process)
1555 
1556  self.schedule.append(self.process.HLTSchedule)
1557  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1558 
1559  #this is a fake, to be removed with fastim migration and HLT menu dump
1560  if self._options.fast:
1561  if not hasattr(self.process,'HLTEndSequence'):
1562  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1563 
1564 
1565  def prepare_RAW2RECO(self, sequence = None):
1566  if ','in sequence:
1567  seqReco=sequence.split(',')[1]
1568  seqDigi=sequence.split(',')[0]
1569  else:
1570  print "RAW2RECO requires two specifications",sequence,"insufficient"
1571 
1572  self.prepare_RAW2DIGI(seqDigi)
1573  self.prepare_RECO(seqReco)
1574  return
1575 
1576  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1577  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1578  self.scheduleSequence(sequence,'raw2digi_step')
1579  # if self._options.isRepacked:
1580  #self.renameInputTagsInSequence(sequence)
1581  return
1582 
1583  def prepare_PATFILTER(self, sequence=None):
1584  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1585  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1586  for filt in allMetFilterPaths:
1587  self.schedule.append(getattr(self.process,'Flag_'+filt))
1588 
1589  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1590  ''' Enrich the schedule with L1 HW validation '''
1591  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1592  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1593  print '\n\n\n DEPRECATED this has no action \n\n\n'
1594  return
1595 
1596  def prepare_L1Reco(self, sequence = "L1Reco"):
1597  ''' Enrich the schedule with L1 reconstruction '''
1598  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1599  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1600  return
1601 
1602  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1603  ''' Enrich the schedule with L1 reconstruction '''
1605  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1606  return
1607 
1608  def prepare_FILTER(self, sequence = None):
1609  ''' Enrich the schedule with a user defined filter sequence '''
1610  ## load the relevant part
1611  filterConfig=self.load(sequence.split('.')[0])
1612  filterSeq=sequence.split('.')[-1]
1613  ## print it in the configuration
1615  def __init__(self):
1616  self.inliner=''
1617  pass
1618  def enter(self,visitee):
1619  try:
1620  label=visitee.label()
1621  ##needs to be in reverse order
1622  self.inliner=label+','+self.inliner
1623  except:
1624  pass
1625  def leave(self,v): pass
1626 
1627  expander=PrintAllModules()
1628  getattr(self.process,filterSeq).visit( expander )
1629  self._options.inlineObjets+=','+expander.inliner
1630  self._options.inlineObjets+=','+filterSeq
1631 
1632  ## put the filtering path in the schedule
1633  self.scheduleSequence(filterSeq,'filtering_step')
1634  self.nextScheduleIsConditional=True
1635  ## put it before all the other paths
1636  self.productionFilterSequence = filterSeq
1637 
1638  return
1639 
1640  def prepare_RECO(self, sequence = "reconstruction"):
1641  ''' Enrich the schedule with reconstruction '''
1642  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1643  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1644  return
1645 
1646  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1647  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1648  if not self._options.fast:
1649  print "ERROR: this step is only implemented for FastSim"
1650  sys.exit()
1651  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1652  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1653  return
1654 
1655  def prepare_PAT(self, sequence = "miniAOD"):
1656  ''' Enrich the schedule with PAT '''
1657  self.prepare_PATFILTER(self)
1658  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1659  self.labelsToAssociate.append('patTask')
1660  if not self._options.runUnscheduled:
1661  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1662  if self._options.isData:
1663  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1664  else:
1665  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1666  if self._options.fast:
1667  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1668 
1669  if self._options.hltProcess:
1670  if len(self._options.customise_commands) > 1:
1671  self._options.customise_commands = self._options.customise_commands + " \n"
1672  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1673  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1674 
1675 # self.renameHLTprocessInSequence(sequence)
1676 
1677  return
1678 
1679  def prepare_EI(self, sequence = None):
1680  ''' Enrich the schedule with event interpretation '''
1681  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1682  if sequence in EventInterpretation:
1683  self.EIDefaultCFF = EventInterpretation[sequence]
1684  sequence = 'EIsequence'
1685  else:
1686  raise Exception('Cannot set %s event interpretation'%( sequence) )
1687  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1688  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1689  return
1690 
1691  def prepare_SKIM(self, sequence = "all"):
1692  ''' Enrich the schedule with skimming fragments'''
1693  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1694  sequence = sequence.split('.')[-1]
1695 
1696  skimlist=sequence.split('+')
1697  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1698  from Configuration.Skimming.autoSkim import autoSkim
1699  self.expandMapping(skimlist,autoSkim)
1700 
1701  #print "dictionnary for skims:",skimConfig.__dict__
1702  for skim in skimConfig.__dict__:
1703  skimstream = getattr(skimConfig,skim)
1704  if isinstance(skimstream,cms.Path):
1705  #black list the alca path so that they do not appear in the cfg
1706  self.blacklist_paths.append(skimstream)
1707  if (not isinstance(skimstream,cms.FilteredStream)):
1708  continue
1709  shortname = skim.replace('SKIMStream','')
1710  if (sequence=="all"):
1711  self.addExtraStream(skim,skimstream)
1712  elif (shortname in skimlist):
1713  self.addExtraStream(skim,skimstream)
1714  #add a DQM eventcontent for this guy
1715  if self._options.datatier=='DQM':
1716  self.process.load(self.EVTCONTDefaultCFF)
1717  skimstreamDQM = cms.FilteredStream(
1718  responsible = skimstream.responsible,
1719  name = skimstream.name+'DQM',
1720  paths = skimstream.paths,
1721  selectEvents = skimstream.selectEvents,
1722  content = self._options.datatier+'EventContent',
1723  dataTier = cms.untracked.string(self._options.datatier)
1724  )
1725  self.addExtraStream(skim+'DQM',skimstreamDQM)
1726  for i in range(skimlist.count(shortname)):
1727  skimlist.remove(shortname)
1728 
1729 
1730 
1731  if (skimlist.__len__()!=0 and sequence!="all"):
1732  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1733  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1734 
1735  def prepare_USER(self, sequence = None):
1736  ''' Enrich the schedule with a user defined sequence '''
1737  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1738  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1739  return
1740 
1741  def prepare_POSTRECO(self, sequence = None):
1742  """ Enrich the schedule with the postreco step """
1743  self.loadAndRemember(self.POSTRECODefaultCFF)
1744  self.scheduleSequence('postreco_generator','postreco_step')
1745  return
1746 
1747 
1748  def prepare_VALIDATION(self, sequence = 'validation'):
1749  print sequence,"in preparing validation"
1750  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1751  from Validation.Configuration.autoValidation import autoValidation
1752  #in case VALIDATION:something:somethingelse -> something,somethingelse
1753  sequence=sequence.split('.')[-1]
1754  if sequence.find(',')!=-1:
1755  prevalSeqName=sequence.split(',')[0].split('+')
1756  valSeqName=sequence.split(',')[1].split('+')
1757  self.expandMapping(prevalSeqName,autoValidation,index=0)
1758  self.expandMapping(valSeqName,autoValidation,index=1)
1759  else:
1760  if '@' in sequence:
1761  prevalSeqName=sequence.split('+')
1762  valSeqName=sequence.split('+')
1763  self.expandMapping(prevalSeqName,autoValidation,index=0)
1764  self.expandMapping(valSeqName,autoValidation,index=1)
1765  else:
1766  postfix=''
1767  if sequence:
1768  postfix='_'+sequence
1769  prevalSeqName=['prevalidation'+postfix]
1770  valSeqName=['validation'+postfix]
1771  if not hasattr(self.process,valSeqName[0]):
1772  prevalSeqName=['']
1773  valSeqName=[sequence]
1774 
1775  def NFI(index):
1776  ##name from index, required to keep backward compatibility
1777  if index==0:
1778  return ''
1779  else:
1780  return '%s'%index
1781 
1782 
1783  #rename the HLT process in validation steps
1784  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1785  for s in valSeqName+prevalSeqName:
1786  if s:
1788  for (i,s) in enumerate(prevalSeqName):
1789  if s:
1790  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1791  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1792 
1793  for (i,s) in enumerate(valSeqName):
1794  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1795  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1796 
1797  #needed in case the miniAODValidation sequence is run starting from AODSIM
1798  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1799  return
1800 
1801  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1802  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1803  self._options.restoreRNDSeeds=True
1804 
1805  if not 'DIGI' in self.stepMap and not self._options.fast:
1806  self.executeAndRemember("process.mix.playback = True")
1807  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1808  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1809  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1810 
1811  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1812  #will get in the schedule, smoothly
1813  for (i,s) in enumerate(valSeqName):
1814  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1815 
1816  return
1817 
1818 
1820  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1821  It will climb down within PSets, VPSets and VInputTags to find its target"""
1822  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1823  self._paramReplace = paramReplace
1824  self._paramSearch = paramSearch
1825  self._verbose = verbose
1826  self._whitelist = whitelist
1827 
1828  def doIt(self,pset,base):
1829  if isinstance(pset, cms._Parameterizable):
1830  for name in pset.parameters_().keys():
1831  # skip whitelisted parameters
1832  if name in self._whitelist:
1833  continue
1834  # if I use pset.parameters_().items() I get copies of the parameter values
1835  # so I can't modify the nested pset
1836  value = getattr(pset,name)
1837  type = value.pythonTypeName()
1838  if type in ('cms.PSet', 'cms.untracked.PSet'):
1839  self.doIt(value,base+"."+name)
1840  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1841  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1842  elif type in ('cms.string', 'cms.untracked.string'):
1843  if value.value() == self._paramSearch:
1844  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1845  setattr(pset, name,self._paramReplace)
1846  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1847  for (i,n) in enumerate(value):
1848  if not isinstance(n, cms.InputTag):
1849  n=cms.InputTag(n)
1850  if n.processName == self._paramSearch:
1851  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1852  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1853  setattr(n,"processName",self._paramReplace)
1854  value[i]=n
1855  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1856  for (i,n) in enumerate(value):
1857  if n==self._paramSearch:
1858  getattr(pset,name)[i]=self._paramReplace
1859  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1860  if value.processName == self._paramSearch:
1861  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1862  setattr(getattr(pset, name),"processName",self._paramReplace)
1863 
1864  def enter(self,visitee):
1865  label = ''
1866  try:
1867  label = visitee.label()
1868  except AttributeError:
1869  label = '<Module not in a Process>'
1870  except:
1871  label = 'other execption'
1872  self.doIt(visitee, label)
1873 
1874  def leave(self,visitee):
1875  pass
1876 
1877  #visit a sequence to repalce all input tags
1878  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1879  print "Replacing all InputTag %s => %s"%(oldT,newT)
1880  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1881  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1882  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1883  if not loadMe in self.additionalCommands:
1884  self.additionalCommands.append(loadMe)
1885  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1886 
1887  #change the process name used to address HLT results in any sequence
1888  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1889  if self._options.hltProcess:
1890  proc=self._options.hltProcess
1891  else:
1892  proc=self.process.name_()
1893  if proc==HLTprocess: return
1894  # look up all module in dqm sequence
1895  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1896  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1897  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1898  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1899  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1900 
1901 
1902  def expandMapping(self,seqList,mapping,index=None):
1903  maxLevel=20
1904  level=0
1905  while '@' in repr(seqList) and level<maxLevel:
1906  level+=1
1907  for specifiedCommand in seqList:
1908  if specifiedCommand.startswith('@'):
1909  location=specifiedCommand[1:]
1910  if not location in mapping:
1911  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1912  mappedTo=mapping[location]
1913  if index!=None:
1914  mappedTo=mappedTo[index]
1915  seqList.remove(specifiedCommand)
1916  seqList.extend(mappedTo.split('+'))
1917  break;
1918  if level==maxLevel:
1919  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1920 
1921  def prepare_DQM(self, sequence = 'DQMOffline'):
1922  # this one needs replacement
1923 
1924  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1925  sequenceList=sequence.split('.')[-1].split('+')
1926  postSequenceList=sequence.split('.')[-1].split('+')
1927  from DQMOffline.Configuration.autoDQM import autoDQM
1928  self.expandMapping(sequenceList,autoDQM,index=0)
1929  self.expandMapping(postSequenceList,autoDQM,index=1)
1930 
1931  if len(set(sequenceList))!=len(sequenceList):
1932  sequenceList=list(set(sequenceList))
1933  print "Duplicate entries for DQM:, using",sequenceList
1934 
1935  pathName='dqmoffline_step'
1936  for (i,sequence) in enumerate(sequenceList):
1937  if (i!=0):
1938  pathName='dqmoffline_%d_step'%(i)
1939 
1940  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1941  self.renameHLTprocessInSequence(sequence)
1942 
1943  setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1944  self.schedule.append(getattr(self.process,pathName))
1945 
1946  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1947  #will get in the schedule, smoothly
1948  getattr(self.process,pathName).insert(0,self.process.genstepfilter)
1949 
1950  pathName='dqmofflineOnPAT_step'
1951  for (i,sequence) in enumerate(postSequenceList):
1952  if (i!=0):
1953  pathName='dqmofflineOnPAT_%d_step'%(i)
1954 
1955  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1956  self.schedule.append(getattr(self.process,pathName))
1957 
1958  def prepare_HARVESTING(self, sequence = None):
1959  """ Enrich the process with harvesting step """
1960  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1961  self.loadAndRemember(self.DQMSaverCFF)
1962 
1963  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1964  sequence = sequence.split('.')[-1]
1965 
1966  # decide which HARVESTING paths to use
1967  harvestingList = sequence.split("+")
1968  from DQMOffline.Configuration.autoDQM import autoDQM
1969  from Validation.Configuration.autoValidation import autoValidation
1970  import copy
1971  combined_mapping = copy.deepcopy( autoDQM )
1972  combined_mapping.update( autoValidation )
1973  self.expandMapping(harvestingList,combined_mapping,index=-1)
1974 
1975  if len(set(harvestingList))!=len(harvestingList):
1976  harvestingList=list(set(harvestingList))
1977  print "Duplicate entries for HARVESTING, using",harvestingList
1978 
1979  for name in harvestingList:
1980  if not name in harvestingConfig.__dict__:
1981  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1982  continue
1983  harvestingstream = getattr(harvestingConfig,name)
1984  if isinstance(harvestingstream,cms.Path):
1985  self.schedule.append(harvestingstream)
1986  self.blacklist_paths.append(harvestingstream)
1987  if isinstance(harvestingstream,cms.Sequence):
1988  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1989  self.schedule.append(getattr(self.process,name+"_step"))
1990 
1991  self.scheduleSequence('DQMSaver','dqmsave_step')
1992  return
1993 
1994  def prepare_ALCAHARVEST(self, sequence = None):
1995  """ Enrich the process with AlCaHarvesting step """
1996  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
1997  sequence=sequence.split(".")[-1]
1998 
1999  # decide which AlcaHARVESTING paths to use
2000  harvestingList = sequence.split("+")
2001 
2002 
2003 
2004  from Configuration.AlCa.autoPCL import autoPCL
2005  self.expandMapping(harvestingList,autoPCL)
2006 
2007  for name in harvestingConfig.__dict__:
2008  harvestingstream = getattr(harvestingConfig,name)
2009  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2010  self.schedule.append(harvestingstream)
2011  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2012  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2013  harvestingList.remove(name)
2014  # append the common part at the end of the sequence
2015  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2016  self.schedule.append(lastStep)
2017 
2018  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2019  print "The following harvesting could not be found : ", harvestingList
2020  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2021 
2022 
2023 
2024  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2025  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2026  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2027  return
2028 
2030  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2031  self.schedule.append(self.process.reconstruction)
2032 
2033 
2034  def build_production_info(self, evt_type, evtnumber):
2035  """ Add useful info for the production. """
2036  self.process.configurationMetadata=cms.untracked.PSet\
2037  (version=cms.untracked.string("$Revision: 1.19 $"),
2038  name=cms.untracked.string("Applications"),
2039  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2040  )
2041 
2042  self.addedObjects.append(("Production Info","configurationMetadata"))
2043 
2044 
2045  def prepare(self, doChecking = False):
2046  """ Prepare the configuration string and add missing pieces."""
2047 
2048  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2049  self.addMaxEvents()
2050  if self.with_input:
2051  self.addSource()
2052  self.addStandardSequences()
2053  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2054  self.completeInputCommand()
2055  self.addConditions()
2056 
2057 
2058  outputModuleCfgCode=""
2059  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2060  outputModuleCfgCode=self.addOutput()
2061 
2062  self.addCommon()
2063 
2064  self.pythonCfgCode = "# Auto generated configuration file\n"
2065  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2066  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2067  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2068  if hasattr(self._options,"era") and self._options.era :
2069  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2070  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2071  # Multiple eras can be specified in a comma seperated list
2072  for requestedEra in self._options.era.split(",") :
2073  self.pythonCfgCode += ",eras."+requestedEra
2074  self.pythonCfgCode += ")\n\n" # end of the line
2075  else :
2076  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2077 
2078  self.pythonCfgCode += "# import of standard configurations\n"
2079  for module in self.imports:
2080  self.pythonCfgCode += ("process.load('"+module+"')\n")
2081 
2082  # production info
2083  if not hasattr(self.process,"configurationMetadata"):
2084  self.build_production_info(self._options.evt_type, self._options.number)
2085  else:
2086  #the PSet was added via a load
2087  self.addedObjects.append(("Production Info","configurationMetadata"))
2088 
2089  self.pythonCfgCode +="\n"
2090  for comment,object in self.addedObjects:
2091  if comment!="":
2092  self.pythonCfgCode += "\n# "+comment+"\n"
2093  self.pythonCfgCode += dumpPython(self.process,object)
2094 
2095  # dump the output definition
2096  self.pythonCfgCode += "\n# Output definition\n"
2097  self.pythonCfgCode += outputModuleCfgCode
2098 
2099  # dump all additional outputs (e.g. alca or skim streams)
2100  self.pythonCfgCode += "\n# Additional output definition\n"
2101  #I do not understand why the keys are not normally ordered.
2102  nl=self.additionalOutputs.keys()
2103  nl.sort()
2104  for name in nl:
2105  output = self.additionalOutputs[name]
2106  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2107  tmpOut = cms.EndPath(output)
2108  setattr(self.process,name+'OutPath',tmpOut)
2109  self.schedule.append(tmpOut)
2110 
2111  # dump all additional commands
2112  self.pythonCfgCode += "\n# Other statements\n"
2113  for command in self.additionalCommands:
2114  self.pythonCfgCode += command + "\n"
2115 
2116  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2117  for object in self._options.inlineObjets.split(','):
2118  if not object:
2119  continue
2120  if not hasattr(self.process,object):
2121  print 'cannot inline -'+object+'- : not known'
2122  else:
2123  self.pythonCfgCode +='\n'
2124  self.pythonCfgCode +=dumpPython(self.process,object)
2125 
2126  # dump all paths
2127  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2128  for path in self.process.paths:
2129  if getattr(self.process,path) not in self.blacklist_paths:
2130  self.pythonCfgCode += dumpPython(self.process,path)
2131 
2132  for endpath in self.process.endpaths:
2133  if getattr(self.process,endpath) not in self.blacklist_paths:
2134  self.pythonCfgCode += dumpPython(self.process,endpath)
2135 
2136  # dump the schedule
2137  self.pythonCfgCode += "\n# Schedule definition\n"
2138  result = "process.schedule = cms.Schedule("
2139 
2140  # handling of the schedule
2141  self.process.schedule = cms.Schedule()
2142  for item in self.schedule:
2143  if not isinstance(item, cms.Schedule):
2144  self.process.schedule.append(item)
2145  else:
2146  self.process.schedule.extend(item)
2147 
2148  if hasattr(self.process,"HLTSchedule"):
2149  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2150  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2151  pathNames = ['process.'+p.label_() for p in beforeHLT]
2152  result += ','.join(pathNames)+')\n'
2153  result += 'process.schedule.extend(process.HLTSchedule)\n'
2154  pathNames = ['process.'+p.label_() for p in afterHLT]
2155  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2156  else:
2157  pathNames = ['process.'+p.label_() for p in self.schedule]
2158  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2159 
2160  self.pythonCfgCode += result
2161 
2162  for labelToAssociate in self.labelsToAssociate:
2163  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2164  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2165 
2166  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2167  associatePatAlgosToolsTask(self.process)
2168  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2169  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2170 
2171  if self._options.nThreads is not "1":
2172  self.pythonCfgCode +="\n"
2173  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2174  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2175  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2176  #repacked version
2177  if self._options.isRepacked:
2178  self.pythonCfgCode +="\n"
2179  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2180  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2181  MassReplaceInputTag(self.process)
2182 
2183  # special treatment in case of production filter sequence 2/2
2184  if self.productionFilterSequence:
2185  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2186  self.pythonCfgCode +='for path in process.paths:\n'
2187  if len(self.conditionalPaths):
2188  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2189  if len(self.excludedPaths):
2190  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2191  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2192  pfs = getattr(self.process,self.productionFilterSequence)
2193  for path in self.process.paths:
2194  if not path in self.conditionalPaths: continue
2195  if path in self.excludedPaths: continue
2196  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2197 
2198 
2199  # dump customise fragment
2200  self.pythonCfgCode += self.addCustomise()
2201 
2202  if self._options.runUnscheduled:
2203  # prune and delete paths
2204  #this is not supporting the blacklist at this point since I do not understand it
2205  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2206  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2207  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2208 
2209  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2210  self.process=convertToUnscheduled(self.process)
2211 
2212  self.pythonCfgCode += self.addCustomise(1)
2213 
2214  self.pythonCfgCode += self.addCustomiseCmdLine()
2215 
2216  # Temporary hack to put the early delete customization after
2217  # everything else
2218  #
2219  # FIXME: remove when no longer needed
2220  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2221  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2222  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2223  self.pythonCfgCode += "# End adding early deletion\n"
2224  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2225  self.process = customiseEarlyDelete(self.process)
2226 
2227 
2228  # make the .io file
2229 
2230  if self._options.io:
2231  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2232  if not self._options.io.endswith('.io'): self._option.io+='.io'
2233  io=open(self._options.io,'w')
2234  ioJson={}
2235  if hasattr(self.process.source,"fileNames"):
2236  if len(self.process.source.fileNames.value()):
2237  ioJson['primary']=self.process.source.fileNames.value()
2238  if hasattr(self.process.source,"secondaryFileNames"):
2239  if len(self.process.source.secondaryFileNames.value()):
2240  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2241  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2242  ioJson['pileup']=self._options.pileup_input[4:]
2243  for (o,om) in self.process.outputModules_().items():
2244  ioJson[o]=om.fileName.value()
2245  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2246  if self.productionFilterSequence:
2247  ioJson['filter']=self.productionFilterSequence
2248  import json
2249  io.write(json.dumps(ioJson))
2250  return
2251 
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:34
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:71
def prepare_L1REPACK(self, sequence=None)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def doNotInlineEventContent(instance, label="cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)")
event content
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:8
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_DIGIPREMIX(self, sequence=None)
def prepare_RECO(self, sequence="reconstruction")
put the filtering path in the schedule
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def addCustomise(self, unsch=0)
def prepare_DIGIPREMIX_S2(self, sequence=None)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
now for a useful command
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:49
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
Definition: Utilities.py:45
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
inliner
needs to be in reverse order
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def prepare_ALCAHARVEST(self, sequence=None)
def defineMixing(dict)
Definition: Mixing.py:177
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:23
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
double split
Definition: MVATrainer.cc:139
def completeInputCommand(self)
add the corresponding input content
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def prepare_PAT(self, sequence="miniAOD")