
ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 class Options:
19  pass
20 
21 # the canonical defaults
22 defaultOptions = Options()
23 defaultOptions.datamix = 'DataOnSim'
24 defaultOptions.isMC=False
25 defaultOptions.isData=True
26 defaultOptions.step=''
27 defaultOptions.pileup='NoPileUp'
28 defaultOptions.pileup_input = None
29 defaultOptions.pileup_dasoption = ''
30 defaultOptions.geometry = 'SimDB'
31 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
32 defaultOptions.magField = ''
33 defaultOptions.conditions = None
34 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
35 defaultOptions.harvesting= 'AtRunEnd'
36 defaultOptions.gflash = False
37 defaultOptions.number = -1
38 defaultOptions.number_out = None
39 defaultOptions.arguments = ""
40 defaultOptions.name = "NO NAME GIVEN"
41 defaultOptions.evt_type = ""
42 defaultOptions.filein = ""
43 defaultOptions.dasquery=""
44 defaultOptions.dasoption=""
45 defaultOptions.secondfilein = ""
46 defaultOptions.customisation_file = []
47 defaultOptions.customisation_file_unsch = []
48 defaultOptions.customise_commands = ""
49 defaultOptions.inline_custom=False
50 defaultOptions.particleTable = 'pythiapdt'
51 defaultOptions.particleTableList = ['pythiapdt','pdt']
52 defaultOptions.dirin = ''
53 defaultOptions.dirout = ''
54 defaultOptions.filetype = 'EDM'
55 defaultOptions.fileout = 'output.root'
56 defaultOptions.filtername = ''
57 defaultOptions.lazy_download = False
58 defaultOptions.custom_conditions = ''
59 defaultOptions.hltProcess = ''
60 defaultOptions.eventcontent = None
61 defaultOptions.datatier = None
62 defaultOptions.inlineEventContent = True
63 defaultOptions.inlineObjets =''
64 defaultOptions.hideGen=False
65 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
66 defaultOptions.beamspot=None
67 defaultOptions.outputDefinition =''
68 defaultOptions.inputCommands = None
69 defaultOptions.outputCommands = None
70 defaultOptions.inputEventContent = ''
71 defaultOptions.dropDescendant = False
72 defaultOptions.relval = None
73 defaultOptions.profile = None
74 defaultOptions.isRepacked = False
75 defaultOptions.restoreRNDSeeds = False
76 defaultOptions.donotDropOnInput = ''
77 defaultOptions.python_filename =''
78 defaultOptions.io=None
79 defaultOptions.lumiToProcess=None
80 defaultOptions.fast=False
81 defaultOptions.runsAndWeightsForMC = None
82 defaultOptions.runsScenarioForMC = None
83 defaultOptions.runUnscheduled = False
84 defaultOptions.timeoutOutput = False
85 defaultOptions.nThreads = '1'
86 
87 # some helper routines
88 def dumpPython(process,name):
89  theObject = getattr(process,name)
90  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
91  return "process."+name+" = " + theObject.dumpPython("process")
92  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
93  return "process."+name+" = " + theObject.dumpPython()+"\n"
94  else:
95  return "process."+name+" = " + theObject.dumpPython()+"\n"
96 def filesFromList(fileName,s=None):
97  import os
98  import FWCore.ParameterSet.Config as cms
99  prim=[]
100  sec=[]
101  for line in open(fileName,'r'):
102  if line.count(".root")>=2:
103  #two files solution...
104  entries=line.replace("\n","").split()
105  if not entries[0] in prim:
106  prim.append(entries[0])
107  if not entries[1] in sec:
108  sec.append(entries[1])
109  elif (line.find(".root")!=-1):
110  entry=line.replace("\n","")
111  if not entry in prim:
112  prim.append(entry)
113  if s:
114  if not hasattr(s,"fileNames"):
115  s.fileNames=cms.untracked.vstring(prim)
116  else:
117  s.fileNames.extend(prim)
118  if len(sec)!=0:
119  if not hasattr(s,"secondaryFileNames"):
120  s.secondaryFileNames=cms.untracked.vstring(sec)
121  else:
122  s.secondaryFileNames.extend(sec)
123  print "found files: ",prim
124  if len(prim)==0:
125  raise Exception("There are no files in input from the file list")
126  if len(sec)!=0:
127  print "found parent files:",sec
128  return (prim,sec)
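# A minimal sketch of the file-list format parsed above (file names are
# hypothetical): one primary file per line, optionally followed on the same
# line by its parent/secondary file.
#
#   /store/data/Run2000A/MyPD/RAW/v1/primary_1.root
#   /store/data/Run2000A/MyPD/RAW/v1/primary_2.root /store/data/Run2000A/MyPD/RAW/v1/parent_2.root
#
# filesFromList('files.txt', process.source) then fills process.source.fileNames
# with the first column and process.source.secondaryFileNames with the second.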
129 
130 def filesFromDASQuery(query,option="",s=None):
131  import os,time
132  import FWCore.ParameterSet.Config as cms
133  prim=[]
134  sec=[]
135  print "the query is",query
136  eC=5
137  count=0
138  while eC!=0 and count<3:
139  if count!=0:
140  print 'Sleeping, then retrying DAS'
141  time.sleep(100)
142  p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
143  pipe=p.stdout.read()
144  tupleP = os.waitpid(p.pid, 0)
145  eC=tupleP[1]
146  count=count+1
147  if eC==0:
148  print "DAS succeeded after",count,"attempts",eC
149  else:
150  print "DAS failed 3 times- I give up"
151  for line in pipe.split('\n'):
152  if line.count(".root")>=2:
153  #two files solution...
154  entries=line.replace("\n","").split()
155  if not entries[0] in prim:
156  prim.append(entries[0])
157  if not entries[1] in sec:
158  sec.append(entries[1])
159  elif (line.find(".root")!=-1):
160  entry=line.replace("\n","")
161  if not entry in prim:
162  prim.append(entry)
163  if s:
164  if not hasattr(s,"fileNames"):
165  s.fileNames=cms.untracked.vstring(prim)
166  else:
167  s.fileNames.extend(prim)
168  if len(sec)!=0:
169  if not hasattr(s,"secondaryFileNames"):
170  s.secondaryFileNames=cms.untracked.vstring(sec)
171  else:
172  s.secondaryFileNames.extend(sec)
173  print "found files: ",prim
174  if len(sec)!=0:
175  print "found parent files:",sec
176  return (prim,sec)
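# Illustrative usage (dataset name is hypothetical): the query string is handed
# verbatim to das_client and every ".root" entry of the reply is collected,
# with up to three attempts before giving up.
#
#   prim, sec = filesFromDASQuery('file dataset = /MyPrimary/MyEra-v1/GEN-SIM-RAW')
#
# When a source object is passed as third argument, the result is appended to
# its fileNames (and secondaryFileNames for two-column replies), as above.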
177 
178 def anyOf(listOfKeys,dict,opt=None):
179  for k in listOfKeys:
180  if k in dict:
181  toReturn=dict[k]
182  dict.pop(k)
183  return toReturn
184  if opt!=None:
185  return opt
186  else:
187  raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
188 
190  """The main building routines """
191 
192  def __init__(self, options, process = None, with_output = False, with_input = False ):
193  """options taken from old cmsDriver and optparse """
194 
195  options.outfile_name = options.dirout+options.fileout
196 
197  self._options = options
198 
199  if self._options.isData and options.isMC:
200  raise Exception("ERROR: You may specify only --data or --mc, not both")
201  #if not self._options.conditions:
202  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
203 
204  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
205  if 'ENDJOB' in self._options.step:
206  if (hasattr(self._options,"outputDefinition") and \
207  self._options.outputDefinition != '' and \
208  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
209  (hasattr(self._options,"datatier") and \
210  self._options.datatier and \
211  'DQMIO' in self._options.datatier):
212  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
213  self._options.step=self._options.step.replace(',ENDJOB','')
214 
215 
216 
217  # what steps are provided by this class?
218  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
219  self.stepMap={}
220  self.stepKeys=[]
221  for step in self._options.step.split(","):
222  if step=='': continue
223  stepParts = step.split(":")
224  stepName = stepParts[0]
225  if stepName not in stepList and not stepName.startswith('re'):
226  raise ValueError("Step "+stepName+" unknown")
227  if len(stepParts)==1:
228  self.stepMap[stepName]=""
229  elif len(stepParts)==2:
230  self.stepMap[stepName]=stepParts[1].split('+')
231  elif len(stepParts)==3:
232  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
233  else:
234  raise ValueError("Step definition "+step+" invalid")
235  self.stepKeys.append(stepName)
236 
237  #print "map of steps is:",self.stepMap
238 
239  self.with_output = with_output
240  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
241  self.with_output = False
242  self.with_input = with_input
243  if process == None:
244  self.process = cms.Process(self._options.name)
245  else:
246  self.process = process
247  self.imports = []
248  self.define_Configs()
249  self.schedule = list()
250 
251  # we are doing two things here:
252  # creating a process to catch errors
253  # building the code to re-create the process
254 
255  self.additionalCommands = []
256  # TODO: maybe a list of to be dumped objects would help as well
257  self.blacklist_paths = []
258  self.addedObjects = []
259  self.additionalOutputs = {}
260 
261  self.productionFilterSequence = None
262  self.labelsToAssociate=[]
263  self.nextScheduleIsConditional=False
264  self.conditionalPaths=[]
265  self.excludedPaths=[]
266 
267  def profileOptions(self):
268  """
269  addIgProfService
270  Function to add the igprof profile service so that you can dump in the middle
271  of the run.
272  """
273  profileOpts = self._options.profile.split(':')
274  profilerStart = 1
275  profilerInterval = 100
276  profilerFormat = None
277  profilerJobFormat = None
278 
279  if len(profileOpts):
280  #type, given as first argument is unused here
281  profileOpts.pop(0)
282  if len(profileOpts):
283  startEvent = profileOpts.pop(0)
284  if not startEvent.isdigit():
285  raise Exception("%s is not a number" % startEvent)
286  profilerStart = int(startEvent)
287  if len(profileOpts):
288  eventInterval = profileOpts.pop(0)
289  if not eventInterval.isdigit():
290  raise Exception("%s is not a number" % eventInterval)
291  profilerInterval = int(eventInterval)
292  if len(profileOpts):
293  profilerFormat = profileOpts.pop(0)
294 
295 
296  if not profilerFormat:
297  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
298  self._options.step,
299  self._options.pileup,
300  self._options.conditions,
301  self._options.datatier,
302  self._options.profileTypeLabel)
303  if not profilerJobFormat and profilerFormat.endswith(".gz"):
304  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
305  elif not profilerJobFormat:
306  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
307 
308  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
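# Sketch of how a --profile specification is decoded (values are illustrative):
# the first token is the profiler type (unused here), then first event,
# event interval and file format.
#
#   --profile igprof:101:500:myjob.gz
#     -> (101, 500, 'myjob.gz', 'myjob_EndOfJob.gz')
#
# When no format is given, one is assembled from evt_type, step, pileup,
# conditions, datatier and profileTypeLabel as shown above.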
309 
310  def load(self,includeFile):
311  includeFile = includeFile.replace('/','.')
312  self.process.load(includeFile)
313  return sys.modules[includeFile]
314 
315  def loadAndRemember(self, includeFile):
316  """helper routine to load am memorize imports"""
317  # we could make the imports a on-the-fly data method of the process instance itself
318  # not sure if the latter is a good idea
319  includeFile = includeFile.replace('/','.')
320  self.imports.append(includeFile)
321  self.process.load(includeFile)
322  return sys.modules[includeFile]
323 
324  def executeAndRemember(self, command):
325  """helper routine to remember replace statements"""
326  self.additionalCommands.append(command)
327  if not command.strip().startswith("#"):
328  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
329  import re
330  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
331  #exec(command.replace("process.","self.process."))
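# A standalone sketch of the rewrite applied before exec() (the command text is
# illustrative): bare "process" references are rebound to "self.process", while
# the original command string is what gets remembered and dumped.
#
#   import re
#   cmd = 'process.mix.playback = cms.untracked.bool(True)'
#   re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])", r"\1self.process\3", cmd)
#   # -> 'self.process.mix.playback = cms.untracked.bool(True)'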
332 
333  def addCommon(self):
334  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
335  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
336  else:
337  self.process.options = cms.untracked.PSet( )
338 
339  self.addedObjects.append(("","options"))
340 
341  if self._options.lazy_download:
342  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
343  stats = cms.untracked.bool(True),
344  enable = cms.untracked.bool(True),
345  cacheHint = cms.untracked.string("lazy-download"),
346  readHint = cms.untracked.string("read-ahead-buffered")
347  )
348  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
349 
350  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
351  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
352 
353  if self._options.profile:
354  (start, interval, eventFormat, jobFormat)=self.profileOptions()
355  self.process.IgProfService = cms.Service("IgProfService",
356  reportFirstEvent = cms.untracked.int32(start),
357  reportEventInterval = cms.untracked.int32(interval),
358  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
359  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
360  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
361 
362  def addMaxEvents(self):
363  """Here we decide how many evts will be processed"""
364  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
365  if self._options.number_out:
366  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
367  self.addedObjects.append(("","maxEvents"))
368 
369  def addSource(self):
370  """Here the source is built. Priority: file, generator"""
371  self.addedObjects.append(("Input source","source"))
372 
373  def filesFromOption(self):
374  for entry in self._options.filein.split(','):
375  print "entry",entry
376  if entry.startswith("filelist:"):
377  filesFromList(entry[9:],self.process.source)
378  elif entry.startswith("dbs:") or entry.startswith("das:"):
379  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
380  else:
381  self.process.source.fileNames.append(self._options.dirin+entry)
382  if self._options.secondfilein:
383  if not hasattr(self.process.source,"secondaryFileNames"):
384  raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
385  for entry in self._options.secondfilein.split(','):
386  print "entry",entry
387  if entry.startswith("filelist:"):
388  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
389  elif entry.startswith("dbs:") or entry.startswith("das:"):
390  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
391  else:
392  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
393 
394  if self._options.filein or self._options.dasquery:
395  if self._options.filetype == "EDM":
396  self.process.source=cms.Source("PoolSource",
397  fileNames = cms.untracked.vstring(),
398  secondaryFileNames= cms.untracked.vstring())
399  filesFromOption(self)
400  elif self._options.filetype == "DAT":
401  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
402  filesFromOption(self)
403  elif self._options.filetype == "LHE":
404  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
405  if self._options.filein.startswith("lhe:"):
406  #list the article directory automatically
407  args=self._options.filein.split(':')
408  article=args[1]
409  print 'LHE input from article ',article
410  location='/store/lhe/'
411  import os
412  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
413  for line in textOfFiles:
414  for fileName in [x for x in line.split() if '.lhe' in x]:
415  self.process.source.fileNames.append(location+article+'/'+fileName)
416  #check first if list of LHE files is loaded (not empty)
417  if len(line)<2:
418  print 'Issue loading LHE files, please check and try again.'
419  sys.exit(-1)
420  #Additional check to protect empty fileNames in process.source
421  if len(self.process.source.fileNames)==0:
422  print 'process.source.fileNames is empty even though the line check passed'
423  sys.exit(-1)
424  if len(args)>2:
425  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
426  else:
427  filesFromOption(self)
428 
429  elif self._options.filetype == "DQM":
430  self.process.source=cms.Source("DQMRootSource",
431  fileNames = cms.untracked.vstring())
432  filesFromOption(self)
433 
434  elif self._options.filetype == "DQMDAQ":
435  # FIXME: how to configure it if there are no input files specified?
436  self.process.source=cms.Source("DQMStreamerReader")
437 
438 
439  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
440  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
441 
442  if self._options.dasquery!='':
443  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
444  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
445 
446  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
447  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
448 
449  ##drop LHEXMLStringProduct on input to save memory if appropriate
450  if 'GEN' in self.stepMap.keys():
451  if self._options.inputCommands:
452  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
453  else:
454  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
455 
456  if self.process.source and self._options.inputCommands:
457  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
458  for command in self._options.inputCommands.split(','):
459  # remove whitespace around the keep/drop statements
460  command = command.strip()
461  if command=='': continue
462  self.process.source.inputCommands.append(command)
463  if not self._options.dropDescendant:
464  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
465 
466  if self._options.lumiToProcess:
467  import FWCore.PythonUtilities.LumiList as LumiList
468  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
469 
470  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
471  if self.process.source is None:
472  self.process.source=cms.Source("EmptySource")
473 
474  # modify source in case of run-dependent MC
475  self.runsAndWeights=None
476  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
477  if not self._options.isMC :
478  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
479  if self._options.runsAndWeightsForMC:
480  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
481  else:
482  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
483  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
484  __import__(RunsAndWeights[self._options.runsScenarioForMC])
485  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
486  else:
487  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
488 
489  if self.runsAndWeights:
490  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
491  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
492  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
493  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
494 
495  return
496 
497  def addOutput(self):
498  """ Add output module to the process """
499  result=""
500  if self._options.outputDefinition:
501  if self._options.datatier:
502  print "--datatier & --eventcontent options ignored"
503 
504  #new output convention with a list of dict
505  outList = eval(self._options.outputDefinition)
506  for (id,outDefDict) in enumerate(outList):
507  outDefDictStr=outDefDict.__str__()
508  if not isinstance(outDefDict,dict):
509  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
510  #requires option: tier
511  theTier=anyOf(['t','tier','dataTier'],outDefDict)
512  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
513  ## event content
514  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
515  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
516  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
517  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
518  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
519  # module label has a particular role
520  if not theModuleLabel:
521  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
522  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
523  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
524  ]
525  for name in tryNames:
526  if not hasattr(self.process,name):
527  theModuleLabel=name
528  break
529  if not theModuleLabel:
530  raise Exception("cannot find a module label for specification: "+outDefDictStr)
531  if id==0:
532  defaultFileName=self._options.outfile_name
533  else:
534  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
535 
536  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
537  if not theFileName.endswith('.root'):
538  theFileName+='.root'
539 
540  if len(outDefDict.keys()):
541  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
542  if theStreamType=='DQMIO': theStreamType='DQM'
543  if theStreamType=='ALL':
544  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
545  else:
546  theEventContent = getattr(self.process, theStreamType+"EventContent")
547 
548 
549  addAlCaSelects=False
550  if theStreamType=='ALCARECO' and not theFilterName:
551  theFilterName='StreamALCACombined'
552  addAlCaSelects=True
553 
554  CppType='PoolOutputModule'
555  if self._options.timeoutOutput:
556  CppType='TimeoutPoolOutputModule'
557  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
558  output = cms.OutputModule(CppType,
559  theEventContent.clone(),
560  fileName = cms.untracked.string(theFileName),
561  dataset = cms.untracked.PSet(
562  dataTier = cms.untracked.string(theTier),
563  filterName = cms.untracked.string(theFilterName))
564  )
565  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
566  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
567  if not theSelectEvent and hasattr(self.process,'filtering_step'):
568  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
569  if theSelectEvent:
570  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
571 
572  if addAlCaSelects:
573  if not hasattr(output,'SelectEvents'):
574  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
575  for alca in self.AlCaPaths:
576  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
577 
578 
579  if hasattr(self.process,theModuleLabel):
580  raise Exception("the current process already has a module "+theModuleLabel+" defined")
581  #print "creating output module ",theModuleLabel
582  setattr(self.process,theModuleLabel,output)
583  outputModule=getattr(self.process,theModuleLabel)
584  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
585  path=getattr(self.process,theModuleLabel+'_step')
586  self.schedule.append(path)
587 
588  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
589  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
590  return label
591  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
592  if theExtraOutputCommands:
593  if not isinstance(theExtraOutputCommands,list):
594  raise Exception("extra ouput command in --option must be a list of strings")
595  if hasattr(self.process,theStreamType+"EventContent"):
596  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
597  else:
598  outputModule.outputCommands.extend(theExtraOutputCommands)
599 
600  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
601 
602  ##ends the --output options model
603  return result
604 
605  streamTypes=self._options.eventcontent.split(',')
606  tiers=self._options.datatier.split(',')
607  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
608  raise Exception("number of event content arguments does not match number of datatier arguments")
609 
610  # if the only step is alca we don't need to put in an output
611  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
612  return "\n"
613 
614  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
615  if streamType=='': continue
616  if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
617  if streamType=='DQMIO': streamType='DQM'
618  theEventContent = getattr(self.process, streamType+"EventContent")
619  if i==0:
620  theFileName=self._options.outfile_name
621  theFilterName=self._options.filtername
622  else:
623  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
624  theFilterName=self._options.filtername
625  CppType='PoolOutputModule'
626  if self._options.timeoutOutput:
627  CppType='TimeoutPoolOutputModule'
628  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
629  output = cms.OutputModule(CppType,
630  theEventContent,
631  fileName = cms.untracked.string(theFileName),
632  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
633  filterName = cms.untracked.string(theFilterName)
634  )
635  )
636  if hasattr(self.process,"generation_step") and streamType!='LHE':
637  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
638  if hasattr(self.process,"filtering_step"):
639  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
640 
641  if streamType=='ALCARECO':
642  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
643 
644  if "MINIAOD" in streamType:
645  from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
646  miniAOD_customizeOutput(output)
647 
648  outputModuleName=streamType+'output'
649  setattr(self.process,outputModuleName,output)
650  outputModule=getattr(self.process,outputModuleName)
651  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
652  path=getattr(self.process,outputModuleName+'_step')
653  self.schedule.append(path)
654 
655  if self._options.outputCommands and streamType!='DQM':
656  for evct in self._options.outputCommands.split(','):
657  if not evct: continue
658  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
659 
660  if not self._options.inlineEventContent:
661  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
662  return label
663  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
664 
665  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
666 
667  return result
668 
670  """
671  Add selected standard sequences to the process
672  """
673  # load the pile up file
674  if self._options.pileup:
675  pileupSpec=self._options.pileup.split(',')[0]
676 
677  # Does the requested pile-up scenario exist?
678  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
679  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
680  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
681  raise Exception(message)
682 
683  # Put mixing parameters in a dictionary
684  if '.' in pileupSpec:
685  mixingDict={'file':pileupSpec}
686  elif pileupSpec.startswith('file:'):
687  mixingDict={'file':pileupSpec[5:]}
688  else:
689  import copy
690  mixingDict=copy.copy(Mixing[pileupSpec])
691  if len(self._options.pileup.split(','))>1:
692  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
693 
694  # Load the pu cfg file corresponding to the requested pu scenario
695  if 'file:' in pileupSpec:
696  #the file is local
697  self.process.load(mixingDict['file'])
698  print "inlining mixing module configuration"
699  self._options.inlineObjets+=',mix'
700  else:
701  self.loadAndRemember(mixingDict['file'])
702 
703  mixingDict.pop('file')
704  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
705  if self._options.pileup_input:
706  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
707  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
708  elif self._options.pileup_input.startswith("filelist:"):
709  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
710  else:
711  mixingDict['F']=self._options.pileup_input.split(',')
712  specialization=defineMixing(mixingDict)
713  for command in specialization:
714  self.executeAndRemember(command)
715  if len(mixingDict)!=0:
716  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
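# Summary sketch of the --pileup syntax handled above (scenario name and
# override are illustrative): the token before the first comma must be a key
# of Mixing, a cff module path, or 'file:<local cff>'; anything after the
# first comma is eval()'d as a python dict and merged into the mixing
# parameters, and --pileup_input fills the 'F' (files) entry before
# defineMixing() turns the dict into executeAndRemember() commands.
#
#   --pileup 'MyScenario,{"F": ["file:minbias_1.root", "file:minbias_2.root"]}'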
717 
718 
719  # load the geometry file
720  try:
721  if len(self.stepMap):
722  self.loadAndRemember(self.GeometryCFF)
723  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
724  self.loadAndRemember(self.SimGeometryCFF)
725  if self.geometryDBLabel:
726  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
727  except ImportError:
728  print "Geometry option",self._options.geometry,"unknown."
729  raise
730 
731  if len(self.stepMap):
732  self.loadAndRemember(self.magFieldCFF)
733 
734  for stepName in self.stepKeys:
735  stepSpec = self.stepMap[stepName]
736  print "Step:", stepName,"Spec:",stepSpec
737  if stepName.startswith('re'):
738  ##add the corresponding input content
739  if stepName[2:] not in self._options.donotDropOnInput:
740  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
741  stepName=stepName[2:]
742  if stepSpec=="":
743  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
744  elif type(stepSpec)==list:
745  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
746  elif type(stepSpec)==tuple:
747  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
748  else:
749  raise ValueError("Invalid step definition")
750 
751  if self._options.restoreRNDSeeds!=False:
752  #it is either True, or a process name
753  if self._options.restoreRNDSeeds==True:
754  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
755  else:
756  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
757  if self._options.inputEventContent or self._options.inputCommands:
758  if self._options.inputCommands:
759  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
760  else:
761  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
762 
763 
765  if self._options.inputEventContent:
766  import copy
767  def dropSecondDropStar(iec):
768  #drop duplicate occurrences of 'drop *' in the list
769  count=0
770  for item in iec:
771  if item=='drop *':
772  if count!=0:
773  iec.remove(item)
774  count+=1
775 
776 
777  ## allow comma separated input eventcontent
778  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
779  for evct in self._options.inputEventContent.split(','):
780  if evct=='': continue
781  theEventContent = getattr(self.process, evct+"EventContent")
782  if hasattr(theEventContent,'outputCommands'):
783  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
784  if hasattr(theEventContent,'inputCommands'):
785  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
786 
787  dropSecondDropStar(self.process.source.inputCommands)
788 
789  if not self._options.dropDescendant:
790  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
791 
792 
793  return
794 
795  def addConditions(self):
796  """Add conditions to the process"""
797  if not self._options.conditions: return
798 
799  if 'FrontierConditions_GlobalTag' in self._options.conditions:
800  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. Please update your command line'
801  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
802 
803  self.loadAndRemember(self.ConditionsDefaultCFF)
804  from Configuration.AlCa.GlobalTag import GlobalTag
805  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
806  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
807  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
808 
809 
810  def addCustomise(self,unsch=0):
811  """Include the customise code """
812 
813  custOpt=[]
814  if unsch==0:
815  for c in self._options.customisation_file:
816  custOpt.extend(c.split(","))
817  else:
818  for c in self._options.customisation_file_unsch:
819  custOpt.extend(c.split(","))
820 
821  custMap=DictTypes.SortedKeysDict()
822  for opt in custOpt:
823  if opt=='': continue
824  if opt.count('.')>1:
825  raise Exception("more than . in the specification:"+opt)
826  fileName=opt.split('.')[0]
827  if opt.count('.')==0: rest='customise'
828  else:
829  rest=opt.split('.')[1]
830  if rest=='py': rest='customise' #catch the case of --customise file.py
831 
832  if fileName in custMap:
833  custMap[fileName].extend(rest.split('+'))
834  else:
835  custMap[fileName]=rest.split('+')
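# Example of the parsed customisation map (module and function names are
# hypothetical):
#
#   --customise MyAnalysis/Tools/myCustoms.funcA+funcB,fixups.py
#
#   custMap == {'MyAnalysis/Tools/myCustoms': ['funcA', 'funcB'],
#               'fixups': ['customise']}
#
# Each file is then imported (slashes become dots) and every listed function
# is applied in turn as process = fcn(process).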
836 
837  if len(custMap)==0:
838  final_snippet='\n'
839  else:
840  final_snippet='\n# customisation of the process.\n'
841 
842  allFcn=[]
843  for opt in custMap:
844  allFcn.extend(custMap[opt])
845  for fcn in allFcn:
846  if allFcn.count(fcn)!=1:
847  raise Exception("cannot specify twice "+fcn+" as a customisation method")
848 
849  for f in custMap:
850  # let python search for that package and do syntax checking at the same time
851  packageName = f.replace(".py","").replace("/",".")
852  __import__(packageName)
853  package = sys.modules[packageName]
854 
855  # now ask the package for its definition and pick .py instead of .pyc
856  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
857 
858  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
859  if self._options.inline_custom:
860  for line in file(customiseFile,'r'):
861  if "import FWCore.ParameterSet.Config" in line:
862  continue
863  final_snippet += line
864  else:
865  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
866  for fcn in custMap[f]:
867  print "customising the process with",fcn,"from",f
868  if not hasattr(package,fcn):
869  #bound to fail at run time
870  raise Exception("config "+f+" has no function "+fcn)
871  #execute the command
872  self.process=getattr(package,fcn)(self.process)
873  #and print it in the configuration
874  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
875  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
876 
877  if len(custMap)!=0:
878  final_snippet += '\n# End of customisation functions\n'
879 
880  ### now for a useful command
881  return final_snippet
882 
882 
883  def addCustomiseCmdLine(self):
884  final_snippet='\n# Customisation from command line\n'
885  if self._options.customise_commands:
886  import string
887  for com in self._options.customise_commands.split('\\n'):
888  com=string.lstrip(com)
889  self.executeAndRemember(com)
890  final_snippet +='\n'+com
891 
892  return final_snippet
893 
894  #----------------------------------------------------------------------------
895  # here the methods to define the python includes for each step or
896  # conditions
897  #----------------------------------------------------------------------------
898  def define_Configs(self):
899  if len(self.stepMap):
900  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
901  if self._options.particleTable not in defaultOptions.particleTableList:
902  print 'Invalid particle table provided. Options are:'
903  print defaultOptions.particleTableList
904  sys.exit(-1)
905  else:
906  if len(self.stepMap):
907  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
908 
909  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
910 
911  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
912  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
913  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
914  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
915  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
916  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
917  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
918  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
919  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
920  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
921  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
922  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
923  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
924  self.EIDefaultCFF=None
925  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
926  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
927  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
928  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
929  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
930  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
931  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
932  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
933  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
934  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
935  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
936 
937  if "DATAMIX" in self.stepMap.keys():
938  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
939  if self._options.datamix == 'PreMix':
940  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
941  else:
942  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
943  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
944  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
945 
946  if "DIGIPREMIX" in self.stepMap.keys():
947  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
948  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawPreMixing_cff"
949  self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"
950 
951  self.ALCADefaultSeq=None
952  self.LHEDefaultSeq='externalLHEProducer'
953  self.GENDefaultSeq='pgen'
954  self.SIMDefaultSeq='psim'
955  self.DIGIDefaultSeq='pdigi'
956  self.DIGIPREMIXDefaultSeq='pdigi'
957  self.DIGIPREMIX_S2DefaultSeq='pdigi'
958  self.DATAMIXDefaultSeq=None
959  self.DIGI2RAWDefaultSeq='DigiToRaw'
960  self.HLTDefaultSeq='GRun'
961  self.L1DefaultSeq=None
962  self.L1REPACKDefaultSeq='GT'
963  self.HARVESTINGDefaultSeq=None
964  self.ALCAHARVESTDefaultSeq=None
965  self.CFWRITERDefaultSeq=None
966  self.RAW2DIGIDefaultSeq='RawToDigi'
967  self.L1RecoDefaultSeq='L1Reco'
968  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
969  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
970  self.RECODefaultSeq='reconstruction'
971  else:
972  self.RECODefaultSeq='reconstruction_fromRECO'
973 
974  self.EIDefaultSeq='top'
975  self.POSTRECODefaultSeq=None
976  self.L1HwValDefaultSeq='L1HwVal'
977  self.DQMDefaultSeq='DQMOffline'
978  self.VALIDATIONDefaultSeq=''
979  self.ENDJOBDefaultSeq='endOfProcess'
980  self.REPACKDefaultSeq='DigiToRawRepack'
981  self.PATDefaultSeq='miniAOD'
982 
983  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
984 
985  if not self._options.beamspot:
986  self._options.beamspot=VtxSmearedDefaultKey
987 
988  # if it's MC then change the raw2digi
989  if self._options.isMC==True:
990  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
991  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
992  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
993  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
994  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
995  else:
996  self._options.beamspot = None
997 
998  #patch for gen, due to backward incompatibility
999  if 'reGEN' in self.stepMap:
1000  self.GENDefaultSeq='fixGenInfo'
1001 
1002  if self._options.scenario=='cosmics':
1003  self._options.pileup='Cosmics'
1004  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1005  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1006  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1007  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1008  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1009  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1010  if self._options.isMC==True:
1011  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1012  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1013  self.RECODefaultSeq='reconstructionCosmics'
1014  self.DQMDefaultSeq='DQMOfflineCosmics'
1015 
1016  if self._options.scenario=='HeavyIons':
1017  if not self._options.beamspot:
1018  self._options.beamspot=VtxSmearedHIDefaultKey
1019  self.HLTDefaultSeq = 'HIon'
1020  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1021  self.VALIDATIONDefaultSeq=''
1022  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1023  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1024  self.RECODefaultSeq='reconstructionHeavyIons'
1025  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1026  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1027  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1028  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1029  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1030  if self._options.isMC==True:
1031  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1032 
1033 
1034  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1035 
1036  self.USERDefaultSeq='user'
1037  self.USERDefaultCFF=None
1038 
1039  # the magnetic field
1040  if self._options.isData:
1041  if self._options.magField==defaultOptions.magField:
1042  print "magnetic field option forced to: AutoFromDBCurrent"
1043  self._options.magField='AutoFromDBCurrent'
1044  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1045  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1046 
1047  # the geometry
1048  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1049  self.geometryDBLabel=None
1050  simGeometry=''
1051  if self._options.fast:
1052  if 'start' in self._options.conditions.lower():
1053  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1054  else:
1055  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1056  else:
1057  def inGeometryKeys(opt):
1058  from Configuration.StandardSequences.GeometryConf import GeometryConf
1059  if opt in GeometryConf:
1060  return GeometryConf[opt]
1061  else:
1062  return opt
1063 
1064  geoms=self._options.geometry.split(',')
1065  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1066  if len(geoms)==2:
1067  #may specify the reco geometry
1068  if '/' in geoms[1] or '_cff' in geoms[1]:
1069  self.GeometryCFF=geoms[1]
1070  else:
1071  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1072 
1073  if (geoms[0].startswith('DB:')):
1074  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1075  self.geometryDBLabel=geoms[0][3:]
1076  print "with DB:"
1077  else:
1078  if '/' in geoms[0] or '_cff' in geoms[0]:
1079  self.SimGeometryCFF=geoms[0]
1080  else:
1081  simGeometry=geoms[0]
1082  if self._options.gflash==True:
1083  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1084  else:
1085  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1086 
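# Sketch of the --geometry forms handled above (names are illustrative):
#
#   DB:Extended          -> GeometrySimDB_cff with XMLFromDBSource label 'Extended'
#   MyGeomKey            -> looked up in GeometryConf, may expand to 'SimGeom,RecoGeom'
#   SimXYZ,RecoXYZ       -> Configuration/Geometry/GeometrySimXYZ_cff (GFlash variant
#                           with --gflash) plus Configuration/Geometry/GeometryRecoXYZ_cff
#
# A token containing '/' or '_cff' is taken verbatim as a cff path.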
1087  # synchronize the geometry configuration and the FullSimulation sequence to be used
1088  if simGeometry not in defaultOptions.geometryExtendedOptions:
1089  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1090 
1091  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1092  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1093  self._options.beamspot='NoSmear'
1094 
1095  # fastsim requires some changes to the default cff files and sequences
1096  if self._options.fast:
1097  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1098  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1099  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1100  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1101  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1102 
1103  # Mixing
1104  if self._options.pileup=='default':
1105  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1106  self._options.pileup=MixingDefaultKey
1107 
1108 
1109  #not driven by a default cff anymore
1110  if self._options.isData:
1111  self._options.pileup=None
1112 
1113 
1114  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1115 
1116  # for alca, skims, etc
1117  def addExtraStream(self,name,stream,workflow='full'):
1118  # define output module and go from there
1119  output = cms.OutputModule("PoolOutputModule")
1120  if stream.selectEvents.parameters_().__len__()!=0:
1121  output.SelectEvents = stream.selectEvents
1122  else:
1123  output.SelectEvents = cms.untracked.PSet()
1124  output.SelectEvents.SelectEvents=cms.vstring()
1125  if isinstance(stream.paths,tuple):
1126  for path in stream.paths:
1127  output.SelectEvents.SelectEvents.append(path.label())
1128  else:
1129  output.SelectEvents.SelectEvents.append(stream.paths.label())
1130 
1131 
1132 
1133  if isinstance(stream.content,str):
1134  evtPset=getattr(self.process,stream.content)
1135  for p in evtPset.parameters_():
1136  setattr(output,p,getattr(evtPset,p))
1137  if not self._options.inlineEventContent:
1138  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1139  return label
1140  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1141  else:
1142  output.outputCommands = stream.content
1143 
1144 
1145  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1146 
1147  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1148  filterName = cms.untracked.string(stream.name))
1149 
1150  if self._options.filtername:
1151  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1152 
1153  #add an automatic flushing to limit memory consumption
1154  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1155 
1156  if workflow in ("producers","full"):
1157  if isinstance(stream.paths,tuple):
1158  for path in stream.paths:
1159  self.schedule.append(path)
1160  else:
1161  self.schedule.append(stream.paths)
1162 
1163 
1164  # in case of relvals we don't want to have additional outputs
1165  if (not self._options.relval) and workflow in ("full","output"):
1166  self.additionalOutputs[name] = output
1167  setattr(self.process,name,output)
1168 
1169  if workflow == 'output':
1170  # adjust the select events to the proper trigger results from previous process
1171  filterList = output.SelectEvents.SelectEvents
1172  for i, filter in enumerate(filterList):
1173  filterList[i] = filter+":"+self._options.triggerResultsProcess
1174 
1175  return output
1176 
1177  #----------------------------------------------------------------------------
1178  # here the methods to create the steps. Of course we are doing magic here ;)
1179  # prepare_STEPNAME modifies self.process and whatever else is needed.
1180  #----------------------------------------------------------------------------
1181 
1182  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1183  if ( len(sequence.split('.'))==1 ):
1184  l=self.loadAndRemember(defaultCFF)
1185  elif ( len(sequence.split('.'))==2 ):
1186  l=self.loadAndRemember(sequence.split('.')[0])
1187  sequence=sequence.split('.')[1]
1188  else:
1189  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1190  print sequence,"not recognized"
1191  raise
1192  return l
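# Sketch of the two accepted sequence spellings for a step (names are
# illustrative):
#
#   prepare_<STEP>(sequence='mySequence')
#       -> loads the step's default cff and uses 'mySequence' from it
#   prepare_<STEP>(sequence='MyPkg/MySub/MyStep_cff.seqA+seqB')
#       -> loads MyPkg/MySub/MyStep_cff and uses 'seqA+seqB'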
1193 
1194  def scheduleSequence(self,seq,prefix,what='Path'):
1195  if '*' in seq:
1196  #create only one path with all sequences in it
1197  for i,s in enumerate(seq.split('*')):
1198  if i==0:
1199  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1200  else:
1201  p=getattr(self.process,prefix)
1202  p+=getattr(self.process, s)
1203  self.schedule.append(getattr(self.process,prefix))
1204  return
1205  else:
1206  #create as many paths as there are sequences
1207  if not '+' in seq:
1208  if self.nextScheduleIsConditional:
1209  self.conditionalPaths.append(prefix)
1210  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1211  self.schedule.append(getattr(self.process,prefix))
1212  else:
1213  for i,s in enumerate(seq.split('+')):
1214  sn=prefix+'%d'%(i)
1215  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1216  self.schedule.append(getattr(self.process,sn))
1217  return
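# Sketch of the two scheduling modes (sequence and prefix names are
# illustrative):
#
#   scheduleSequence('seqA+seqB', 'mystep_step')
#       -> two paths: process.mystep_step0 (seqA) and process.mystep_step1 (seqB)
#   scheduleSequence('seqA*seqB', 'mystep_step')
#       -> one path process.mystep_step running seqA then seqB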
1218 
1219  def scheduleSequenceAtEnd(self,seq,prefix):
1220  self.scheduleSequence(seq,prefix,what='EndPath')
1221  return
1222 
1223  def prepare_ALCAPRODUCER(self, sequence = None):
1224  self.prepare_ALCA(sequence, workflow = "producers")
1225 
1226  def prepare_ALCAOUTPUT(self, sequence = None):
1227  self.prepare_ALCA(sequence, workflow = "output")
1228 
1229  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1230  """ Enrich the process with alca streams """
1231  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1232  sequence = sequence.split('.')[-1]
1233 
1234  # decide which ALCA paths to use
1235  alcaList = sequence.split("+")
1236  maxLevel=0
1237  from Configuration.AlCa.autoAlca import autoAlca
1238  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1239  self.expandMapping(alcaList,autoAlca)
1240  self.AlCaPaths=[]
1241  for name in alcaConfig.__dict__:
1242  alcastream = getattr(alcaConfig,name)
1243  shortName = name.replace('ALCARECOStream','')
1244  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1245  output = self.addExtraStream(name,alcastream, workflow = workflow)
1246  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1247  self.AlCaPaths.append(shortName)
1248  if 'DQM' in alcaList:
1249  if not self._options.inlineEventContent and hasattr(self.process,name):
1250  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1251  else:
1252  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1253 
1254  #rename the HLT process name in the alca modules
1255  if self._options.hltProcess or 'HLT' in self.stepMap:
1256  if isinstance(alcastream.paths,tuple):
1257  for path in alcastream.paths:
1258  self.renameHLTprocessInSequence(path.label())
1259  else:
1260  self.renameHLTprocessInSequence(alcastream.paths.label())
1261 
1262  for i in range(alcaList.count(shortName)):
1263  alcaList.remove(shortName)
1264 
1265  # DQM needs a special handling
1266  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1267  path = getattr(alcaConfig,name)
1268  self.schedule.append(path)
1269  alcaList.remove('DQM')
1270 
1271  if isinstance(alcastream,cms.Path):
1272  #blacklist the alca paths so that they do not appear in the cfg
1273  self.blacklist_paths.append(alcastream)
1274 
1275 
1276  if len(alcaList) != 0:
1277  available=[]
1278  for name in alcaConfig.__dict__:
1279  alcastream = getattr(alcaConfig,name)
1280  if isinstance(alcastream,cms.FilteredStream):
1281  available.append(name.replace('ALCARECOStream',''))
1282  print "The following alcas could not be found "+str(alcaList)
1283  print "available ",available
1284  #print "verify your configuration, ignoring for now"
1285  raise Exception("The following alcas could not be found "+str(alcaList))
1286 
1287  def prepare_LHE(self, sequence = None):
1288  #load the fragment
1289  ##make it loadable
1290  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1291  print "Loading lhe fragment from",loadFragment
1292  __import__(loadFragment)
1293  self.process.load(loadFragment)
1294  ##inline the modules
1295  self._options.inlineObjets+=','+sequence
1296 
1297  getattr(self.process,sequence).nEvents = int(self._options.number)
1298 
1299  #schedule it
1300  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1301  self.excludedPaths.append("lhe_step")
1302  self.schedule.append( self.process.lhe_step )
1303 
1304  def prepare_GEN(self, sequence = None):
1305  """ load the fragment of generator configuration """
1306  loadFailure=False
1307  #remove trailing .py
1308  #support old style .cfi by changing something.cfi into something_cfi
1309  #remove python/ from the name
1310  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1311  #standard location of fragments
1312  if not '/' in loadFragment:
1313  loadFragment='Configuration.Generator.'+loadFragment
1314  else:
1315  loadFragment=loadFragment.replace('/','.')
1316  try:
1317  print "Loading generator fragment from",loadFragment
1318  __import__(loadFragment)
1319  except:
1320  loadFailure=True
1321  #if self.process.source and self.process.source.type_()=='EmptySource':
1322  if not (self._options.filein or self._options.dasquery):
1323  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1324 
1325  if not loadFailure:
1326  generatorModule=sys.modules[loadFragment]
1327  genModules=generatorModule.__dict__
1328  #remove lhe producer module since this should have been
1329  #imported instead in the LHE step
1330  if self.LHEDefaultSeq in genModules:
1331  del genModules[self.LHEDefaultSeq]
1332 
1333  if self._options.hideGen:
1334  self.loadAndRemember(loadFragment)
1335  else:
1336  self.process.load(loadFragment)
1337  # expose the objects from that fragment to the configuration
1338  import FWCore.ParameterSet.Modules as cmstypes
1339  for name in genModules:
1340  theObject = getattr(generatorModule,name)
1341  if isinstance(theObject, cmstypes._Module):
1342  self._options.inlineObjets=name+','+self._options.inlineObjets
1343  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1344  self._options.inlineObjets+=','+name
1345 
1346  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1347  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1348  self.productionFilterSequence = 'ProductionFilterSequence'
1349  elif 'generator' in genModules:
1350  self.productionFilterSequence = 'generator'
1351 
1352  """ Enrich the schedule with the rest of the generation step """
1353  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1354  genSeqName=sequence.split('.')[-1]
1355 
1356  if True:
1357  try:
1358  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1359  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1360  self.loadAndRemember(cffToBeLoaded)
1361  except ImportError:
1362  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1363 
1364  if self._options.scenario == 'HeavyIons':
1365  if self._options.pileup=='HiMixGEN':
1366  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1367  else:
1368  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1369 
1370  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1371  self.schedule.append(self.process.generation_step)
1372 
1373  #register to the genstepfilter the name of the path (static right now, but might evolve)
1374  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1375 
1376  if 'reGEN' in self.stepMap:
1377  #stop here
1378  return
1379 
1380  """ Enrich the schedule with the summary of the filter step """
1381  #the gen filter in the endpath
1382  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1383  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1384  return
1385 
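
# A standalone sketch of the fragment-name normalisation done at the top of prepare_GEN
# above; the evt_type value is a made-up example, not a real generator fragment.
evt_type = 'MyGenFragments/Fragments/python/SingleMuPt10_cfi.py'   # hypothetical --evt_type
loadFragment = evt_type.replace('.py', '').replace('.', '_').replace('python/', '')
if '/' not in loadFragment:
    loadFragment = 'Configuration.Generator.' + loadFragment
else:
    loadFragment = loadFragment.replace('/', '.')
print(loadFragment)   # -> MyGenFragments.Fragments.SingleMuPt10_cfi
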
1386  def prepare_SIM(self, sequence = None):
1387  """ Enrich the schedule with the simulation step"""
1388  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1389  if not self._options.fast:
1390  if self._options.gflash==True:
1391  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1392 
1393  if self._options.magField=='0T':
1394  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1395  else:
1396  if self._options.magField=='0T':
1397  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1398 
1399  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1400  return
1401 
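
# A cms-only sketch of what scheduleSequence-style booking (used by prepare_SIM and most
# other steps) amounts to: wrap the step sequence in a Path and record it for the schedule.
# The module type and labels are placeholders, not real CMSSW plugins.
import FWCore.ParameterSet.Config as cms

process = cms.Process('SKETCH')
process.dummySimProducer = cms.EDProducer('DummyProducer')     # placeholder module
process.psim = cms.Sequence(process.dummySimProducer)
process.simulation_step = cms.Path(process.psim)
schedule = [process.simulation_step]                           # later turned into cms.Schedule(...)
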
1402  def prepare_DIGI(self, sequence = None):
1403  """ Enrich the schedule with the digitisation step"""
1404  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1405 
1406  if self._options.gflash==True:
1407  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1408 
1409  if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1410  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1411 
1412  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1413  if self._options.inputEventContent=='':
1414  self._options.inputEventContent='REGEN'
1415  else:
1416  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1417 
1418 
1419  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1420  return
1421 
1422  def prepare_DIGIPREMIX(self, sequence = None):
1423  """ Enrich the schedule with the digitisation step"""
1424  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1425 
1426  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1427 
1428  if sequence == 'pdigi_valid':
1429  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1430  else:
1431  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1432 
1433  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1434  return
1435 
1436  def prepare_DIGIPREMIX_S2(self, sequence = None):
1437  """ Enrich the schedule with the digitisation step"""
1438  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1439 
1440  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1441 
1442 
1443  if sequence == 'pdigi_valid':
1444  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1445  else:
1446  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1447 
1448  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1449  return
1450 
1451  def prepare_CFWRITER(self, sequence = None):
1452  """ Enrich the schedule with the crossing frame writer step"""
1453  self.loadAndRemember(self.CFWRITERDefaultCFF)
1454  self.scheduleSequence('pcfw','cfwriter_step')
1455  return
1456 
1457  def prepare_DATAMIX(self, sequence = None):
1458  """ Enrich the schedule with the digitisation step"""
1459  self.loadAndRemember(self.DATAMIXDefaultCFF)
1460  self.scheduleSequence('pdatamix','datamixing_step')
1461 
1462  if self._options.pileup_input:
1463  theFiles=''
1464  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1465  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1466  elif self._options.pileup_input.startswith("filelist:"):
1467  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1468  else:
1469  theFiles=self._options.pileup_input.split(',')
1470  #print theFiles
1471  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1472 
1473  return
1474 
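
# A standalone sketch of the pileup_input dispatch used above; the dataset name and file
# list are made up, and filesFromDASQuery / filesFromList are the helpers referenced
# elsewhere in this file.
pileup_input = 'das:/RelValMinBias/Example-GEN-SIM/GEN-SIM'    # hypothetical value
if pileup_input.startswith('dbs:') or pileup_input.startswith('das:'):
    query = 'file dataset = %s' % pileup_input[4:]             # would be handed to filesFromDASQuery
elif pileup_input.startswith('filelist:'):
    list_name = pileup_input[9:]                               # would be handed to filesFromList
else:
    theFiles = pileup_input.split(',')                         # plain comma-separated file names
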
1475  def prepare_DIGI2RAW(self, sequence = None):
1476  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1477  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1478  if "DIGIPREMIX" in self.stepMap.keys():
1479  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1480  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1481 
1482  return
1483 
1484  def prepare_REPACK(self, sequence = None):
1485  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1486  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1487  return
1488 
1489  def prepare_L1(self, sequence = None):
1490  """ Enrich the schedule with the L1 simulation step"""
1491  assert(sequence == None)
1492  self.loadAndRemember(self.L1EMDefaultCFF)
1493  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1494  return
1495 
1496  def prepare_L1REPACK(self, sequence = None):
1497  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1498  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1499  if sequence in supported:
1500  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1501  if self._options.scenario == 'HeavyIons':
1502  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1503  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1504  else:
1505  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1506  raise Exception('unsupported feature')
1507 
1508 
1509  def prepare_HLT(self, sequence = None):
1510  """ Enrich the schedule with the HLT simulation step"""
1511  if not sequence:
1512  print "no specification of the hlt menu has been given, should never happen"
1513  raise Exception('no HLT sequence provided')
1514 
1515  if '@' in sequence:
1516  # case where HLT:@something was provided
1517  from Configuration.HLT.autoHLT import autoHLT
1518  key = sequence[1:]
1519  if key in autoHLT:
1520  sequence = autoHLT[key]
1521  else:
1522  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1523 
1524  if ',' in sequence:
1525  #case where HLT:something:something was provided
1526  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1527  optionsForHLT = {}
1528  if self._options.scenario == 'HeavyIons':
1529  optionsForHLT['type'] = 'HIon'
1530  else:
1531  optionsForHLT['type'] = 'GRun'
1532  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1533  if sequence == 'run,fromSource':
1534  if hasattr(self.process.source,'firstRun'):
1535  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1536  elif hasattr(self.process.source,'setRunNumber'):
1537  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1538  else:
1539  raise Exception('Cannot replace menu to load %s'%(sequence))
1540  else:
1541  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1542  else:
1543  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1544 
1545  if self._options.isMC:
1546  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1547 
1548  if self._options.name != 'HLT':
1549  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1550  self.additionalCommands.append('process = ProcessName(process)')
1551  self.additionalCommands.append('')
1552  from HLTrigger.Configuration.CustomConfigs import ProcessName
1553  self.process = ProcessName(self.process)
1554 
1555  self.schedule.append(self.process.HLTSchedule)
1556  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1557 
 1558  #this is a fake, to be removed with fastsim migration and HLT menu dump
1559  if self._options.fast:
1560  if not hasattr(self.process,'HLTEndSequence'):
1561  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1562 
1563 
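
# A standalone sketch of the '@' menu-key resolution performed in prepare_HLT above;
# the mapping below is an illustrative stand-in, not the real Configuration.HLT.autoHLT.
autoHLT = {'frozen2017': 'Fake2'}      # hypothetical key/menu pair
sequence = '@frozen2017'
if '@' in sequence:
    key = sequence[1:]
    if key in autoHLT:
        sequence = autoHLT[key]        # -> 'Fake2', i.e. HLT_Fake2_cff would be loaded
    else:
        raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
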
1564  def prepare_RAW2RECO(self, sequence = None):
 1565  if ',' in sequence:
1566  seqReco=sequence.split(',')[1]
1567  seqDigi=sequence.split(',')[0]
1568  else:
1569  print "RAW2RECO requires two specifications",sequence,"insufficient"
1570 
1571  self.prepare_RAW2DIGI(seqDigi)
1572  self.prepare_RECO(seqReco)
1573  return
1574 
1575  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1576  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1577  self.scheduleSequence(sequence,'raw2digi_step')
1578  # if self._options.isRepacked:
1579  #self.renameInputTagsInSequence(sequence)
1580  return
1581 
1582  def prepare_PATFILTER(self, sequence=None):
1583  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1584  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1585  for filt in allMetFilterPaths:
1586  self.schedule.append(getattr(self.process,'Flag_'+filt))
1587 
1588  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1589  ''' Enrich the schedule with L1 HW validation '''
1590  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1591  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1592  print '\n\n\n DEPRECATED this has no action \n\n\n'
1593  return
1594 
1595  def prepare_L1Reco(self, sequence = "L1Reco"):
1596  ''' Enrich the schedule with L1 reconstruction '''
1597  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1598  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1599  return
1600 
1601  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
 1602  ''' Enrich the schedule with the L1 track trigger '''
1604  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1605  return
1606 
1607  def prepare_FILTER(self, sequence = None):
1608  ''' Enrich the schedule with a user defined filter sequence '''
1609  ## load the relevant part
1610  filterConfig=self.load(sequence.split('.')[0])
1611  filterSeq=sequence.split('.')[-1]
1612  ## print it in the configuration
 1613  class PrintAllModules(object):
 1614  def __init__(self):
1615  self.inliner=''
1616  pass
1617  def enter(self,visitee):
1618  try:
1619  label=visitee.label()
1620  ##needs to be in reverse order
1621  self.inliner=label+','+self.inliner
1622  except:
1623  pass
1624  def leave(self,v): pass
1625 
1626  expander=PrintAllModules()
1627  getattr(self.process,filterSeq).visit( expander )
1628  self._options.inlineObjets+=','+expander.inliner
1629  self._options.inlineObjets+=','+filterSeq
1630 
1631  ## put the filtering path in the schedule
1632  self.scheduleSequence(filterSeq,'filtering_step')
1633  self.nextScheduleIsConditional=True
1634  ## put it before all the other paths
1635  self.productionFilterSequence = filterSeq
1636 
1637  return
1638 
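
# A cms-only sketch of the label-collecting visitor defined inline in prepare_FILTER above;
# module types and labels here are placeholders.
import FWCore.ParameterSet.Config as cms

class CollectLabels(object):
    def __init__(self):
        self.labels = []
    def enter(self, visitee):
        try:
            self.labels.append(visitee.label())
        except Exception:
            pass                        # sequence operators carry no label
    def leave(self, visitee):
        pass

process = cms.Process('SKETCH')
process.fltA = cms.EDFilter('HLTBool', result=cms.bool(True))   # placeholder filters
process.fltB = cms.EDFilter('HLTBool', result=cms.bool(True))
process.userFilterSeq = cms.Sequence(process.fltA + process.fltB)
collector = CollectLabels()
process.userFilterSeq.visit(collector)
print(collector.labels)                 # e.g. ['fltA', 'fltB']
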
1639  def prepare_RECO(self, sequence = "reconstruction"):
1640  ''' Enrich the schedule with reconstruction '''
1641  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1642  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1643  return
1644 
1645  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1646  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1647  if not self._options.fast:
1648  print "ERROR: this step is only implemented for FastSim"
1649  sys.exit()
1650  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1651  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1652  return
1653 
1654  def prepare_PAT(self, sequence = "miniAOD"):
1655  ''' Enrich the schedule with PAT '''
1656  self.prepare_PATFILTER(self)
1657  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1658  self.labelsToAssociate.append('patTask')
1659  if not self._options.runUnscheduled:
1660  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1661  if self._options.isData:
1662  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1663  else:
1664  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1665  if self._options.fast:
1666  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1667 
1668  if self._options.hltProcess:
1669  if len(self._options.customise_commands) > 1:
1670  self._options.customise_commands = self._options.customise_commands + " \n"
1671  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1672  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1673 
1674 # self.renameHLTprocessInSequence(sequence)
1675 
1676  return
1677 
1678  def prepare_EI(self, sequence = None):
1679  ''' Enrich the schedule with event interpretation '''
1680  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1681  if sequence in EventInterpretation:
1682  self.EIDefaultCFF = EventInterpretation[sequence]
1683  sequence = 'EIsequence'
1684  else:
1685  raise Exception('Cannot set %s event interpretation'%( sequence) )
1686  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1687  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1688  return
1689 
1690  def prepare_SKIM(self, sequence = "all"):
1691  ''' Enrich the schedule with skimming fragments'''
1692  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1693  sequence = sequence.split('.')[-1]
1694 
1695  skimlist=sequence.split('+')
1696  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1697  from Configuration.Skimming.autoSkim import autoSkim
1698  self.expandMapping(skimlist,autoSkim)
1699 
1700  #print "dictionnary for skims:",skimConfig.__dict__
1701  for skim in skimConfig.__dict__:
1702  skimstream = getattr(skimConfig,skim)
1703  if isinstance(skimstream,cms.Path):
 1704  #blacklist the skim paths so that they do not appear in the cfg
1705  self.blacklist_paths.append(skimstream)
1706  if (not isinstance(skimstream,cms.FilteredStream)):
1707  continue
1708  shortname = skim.replace('SKIMStream','')
1709  if (sequence=="all"):
1710  self.addExtraStream(skim,skimstream)
1711  elif (shortname in skimlist):
1712  self.addExtraStream(skim,skimstream)
1713  #add a DQM eventcontent for this guy
1714  if self._options.datatier=='DQM':
1715  self.process.load(self.EVTCONTDefaultCFF)
1716  skimstreamDQM = cms.FilteredStream(
1717  responsible = skimstream.responsible,
1718  name = skimstream.name+'DQM',
1719  paths = skimstream.paths,
1720  selectEvents = skimstream.selectEvents,
1721  content = self._options.datatier+'EventContent',
1722  dataTier = cms.untracked.string(self._options.datatier)
1723  )
1724  self.addExtraStream(skim+'DQM',skimstreamDQM)
1725  for i in range(skimlist.count(shortname)):
1726  skimlist.remove(shortname)
1727 
1728 
1729 
1730  if (skimlist.__len__()!=0 and sequence!="all"):
1731  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1732  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1733 
1734  def prepare_USER(self, sequence = None):
1735  ''' Enrich the schedule with a user defined sequence '''
1736  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1737  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1738  return
1739 
1740  def prepare_POSTRECO(self, sequence = None):
1741  """ Enrich the schedule with the postreco step """
1742  self.loadAndRemember(self.POSTRECODefaultCFF)
1743  self.scheduleSequence('postreco_generator','postreco_step')
1744  return
1745 
1746 
1747  def prepare_VALIDATION(self, sequence = 'validation'):
1748  print sequence,"in preparing validation"
1749  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1750  from Validation.Configuration.autoValidation import autoValidation
1751  #in case VALIDATION:something:somethingelse -> something,somethingelse
1752  sequence=sequence.split('.')[-1]
1753  if sequence.find(',')!=-1:
1754  prevalSeqName=sequence.split(',')[0].split('+')
1755  valSeqName=sequence.split(',')[1].split('+')
1756  self.expandMapping(prevalSeqName,autoValidation,index=0)
1757  self.expandMapping(valSeqName,autoValidation,index=1)
1758  else:
1759  if '@' in sequence:
1760  prevalSeqName=sequence.split('+')
1761  valSeqName=sequence.split('+')
1762  self.expandMapping(prevalSeqName,autoValidation,index=0)
1763  self.expandMapping(valSeqName,autoValidation,index=1)
1764  else:
1765  postfix=''
1766  if sequence:
1767  postfix='_'+sequence
1768  prevalSeqName=['prevalidation'+postfix]
1769  valSeqName=['validation'+postfix]
1770  if not hasattr(self.process,valSeqName[0]):
1771  prevalSeqName=['']
1772  valSeqName=[sequence]
1773 
1774  def NFI(index):
1775  ##name from index, required to keep backward compatibility
1776  if index==0:
1777  return ''
1778  else:
1779  return '%s'%index
1780 
1781 
1782  #rename the HLT process in validation steps
1783  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1784  for s in valSeqName+prevalSeqName:
1785  if s:
 1786  self.renameHLTprocessInSequence(s)
 1787  for (i,s) in enumerate(prevalSeqName):
1788  if s:
1789  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1790  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1791 
1792  for (i,s) in enumerate(valSeqName):
1793  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1794  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1795 
1796  #needed in case the miniAODValidation sequence is run starting from AODSIM
1797  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1798  return
1799 
1800  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1801  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1802  self._options.restoreRNDSeeds=True
1803 
1804  if not 'DIGI' in self.stepMap and not self._options.fast:
1805  self.executeAndRemember("process.mix.playback = True")
1806  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1807  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1808  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1809 
1810  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1811  #will get in the schedule, smoothly
1812  for (i,s) in enumerate(valSeqName):
1813  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1814 
1815  return
1816 
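
# A standalone sketch of the NFI step-naming convention used in prepare_VALIDATION above;
# the sequence names are made up.
def NFI(index):
    # name-from-index: the first step keeps the bare name, later ones get a numeric suffix
    return '' if index == 0 else '%s' % index

valSeqName = ['validation', 'validationMiniAOD']
for i, s in enumerate(valSeqName):
    print('validation_step%s -> %s' % (NFI(i), s))
# validation_step -> validation
# validation_step1 -> validationMiniAOD
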
1817 
 1818  class MassSearchReplaceProcessNameVisitor(object):
 1819  """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value
1820  It will climb down within PSets, VPSets and VInputTags to find its target"""
1821  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1822  self._paramReplace = paramReplace
1823  self._paramSearch = paramSearch
1824  self._verbose = verbose
1825  self._whitelist = whitelist
1826 
1827  def doIt(self,pset,base):
1828  if isinstance(pset, cms._Parameterizable):
1829  for name in pset.parameters_().keys():
1830  # skip whitelisted parameters
1831  if name in self._whitelist:
1832  continue
1833  # if I use pset.parameters_().items() I get copies of the parameter values
1834  # so I can't modify the nested pset
1835  value = getattr(pset,name)
1836  type = value.pythonTypeName()
1837  if type in ('cms.PSet', 'cms.untracked.PSet'):
1838  self.doIt(value,base+"."+name)
1839  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1840  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1841  elif type in ('cms.string', 'cms.untracked.string'):
1842  if value.value() == self._paramSearch:
1843  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1844  setattr(pset, name,self._paramReplace)
1845  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1846  for (i,n) in enumerate(value):
1847  if not isinstance(n, cms.InputTag):
1848  n=cms.InputTag(n)
1849  if n.processName == self._paramSearch:
1850  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1851  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1852  setattr(n,"processName",self._paramReplace)
1853  value[i]=n
1854  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1855  for (i,n) in enumerate(value):
1856  if n==self._paramSearch:
1857  getattr(pset,name)[i]=self._paramReplace
1858  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1859  if value.processName == self._paramSearch:
1860  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1861  setattr(getattr(pset, name),"processName",self._paramReplace)
1862 
1863  def enter(self,visitee):
1864  label = ''
1865  try:
1866  label = visitee.label()
1867  except AttributeError:
1868  label = '<Module not in a Process>'
1869  except:
 1870  label = 'other exception'
1871  self.doIt(visitee, label)
1872 
1873  def leave(self,visitee):
1874  pass
1875 
 1876  #visit a sequence to replace all input tags
1877  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1878  print "Replacing all InputTag %s => %s"%(oldT,newT)
1879  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1880  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1881  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1882  if not loadMe in self.additionalCommands:
1883  self.additionalCommands.append(loadMe)
1884  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1885 
1886  #change the process name used to address HLT results in any sequence
1887  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1888  if self._options.hltProcess:
1889  proc=self._options.hltProcess
1890  else:
1891  proc=self.process.name_()
1892  if proc==HLTprocess: return
1893  # look up all module in dqm sequence
1894  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1895  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1896  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1897  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1898  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1899 
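
# A tiny cms-only illustration of what the process-name replacement performed by the
# visitor above boils down to for a single InputTag parameter; the module, tag and
# process names are made up.
import FWCore.ParameterSet.Config as cms

mod = cms.EDProducer('DummyProducer',
                     src=cms.InputTag('hltTriggerSummaryAOD', '', 'HLT'))
if mod.src.processName == 'HLT':
    mod.src.processName = 'reHLT'       # same effect as setattr(value, "processName", proc)
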
1900 
1901  def expandMapping(self,seqList,mapping,index=None):
1902  maxLevel=20
1903  level=0
1904  while '@' in repr(seqList) and level<maxLevel:
1905  level+=1
1906  for specifiedCommand in seqList:
1907  if specifiedCommand.startswith('@'):
1908  location=specifiedCommand[1:]
1909  if not location in mapping:
1910  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1911  mappedTo=mapping[location]
1912  if index!=None:
1913  mappedTo=mappedTo[index]
1914  seqList.remove(specifiedCommand)
1915  seqList.extend(mappedTo.split('+'))
1916  break;
1917  if level==maxLevel:
1918  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1919 
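
# A standalone sketch of the '@' alias expansion implemented by expandMapping above;
# the mapping below is a made-up example, not autoDQM or autoValidation.
def expand_aliases(seq_list, mapping, index=None, max_level=20):
    level = 0
    while any(s.startswith('@') for s in seq_list) and level < max_level:
        level += 1
        for item in list(seq_list):
            if not item.startswith('@'):
                continue
            target = mapping[item[1:]]
            if index is not None:
                target = target[index]
            seq_list.remove(item)
            seq_list.extend(target.split('+'))
            break
    return seq_list

print(expand_aliases(['@common', 'myDQM'], {'common': 'dqmA+dqmB'}))
# -> ['myDQM', 'dqmA', 'dqmB']
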
1920  def prepare_DQM(self, sequence = 'DQMOffline'):
1921  # this one needs replacement
1922 
1923  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1924  sequenceList=sequence.split('.')[-1].split('+')
1925  postSequenceList=sequence.split('.')[-1].split('+')
1926  from DQMOffline.Configuration.autoDQM import autoDQM
1927  self.expandMapping(sequenceList,autoDQM,index=0)
1928  self.expandMapping(postSequenceList,autoDQM,index=1)
1929 
1930  if len(set(sequenceList))!=len(sequenceList):
1931  sequenceList=list(set(sequenceList))
 1932  print "Duplicate entries for DQM, using",sequenceList
1933 
1934  pathName='dqmoffline_step'
1935  for (i,sequence) in enumerate(sequenceList):
1936  if (i!=0):
1937  pathName='dqmoffline_%d_step'%(i)
1938 
1939  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1940  self.renameHLTprocessInSequence(sequence)
1941 
1942  setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1943  self.schedule.append(getattr(self.process,pathName))
1944 
1945  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1946  #will get in the schedule, smoothly
1947  getattr(self.process,pathName).insert(0,self.process.genstepfilter)
1948 
1949  pathName='dqmofflineOnPAT_step'
1950  for (i,sequence) in enumerate(postSequenceList):
1951  if (i!=0):
1952  pathName='dqmofflineOnPAT_%d_step'%(i)
1953 
1954  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1955  self.schedule.append(getattr(self.process,pathName))
1956 
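
# A standalone sketch of the path-naming scheme used above when several DQM sequences are
# requested; the sequence names are made up.
sequenceList = ['DQMOfflineCommon', 'DQMOfflineMuon']
for i, seq in enumerate(sequenceList):
    pathName = 'dqmoffline_step' if i == 0 else 'dqmoffline_%d_step' % i
    print(pathName + ' -> ' + seq)
# dqmoffline_step -> DQMOfflineCommon
# dqmoffline_1_step -> DQMOfflineMuon
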
1957  def prepare_HARVESTING(self, sequence = None):
1958  """ Enrich the process with harvesting step """
1959  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1960  self.loadAndRemember(self.DQMSaverCFF)
1961 
1962  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1963  sequence = sequence.split('.')[-1]
1964 
1965  # decide which HARVESTING paths to use
1966  harvestingList = sequence.split("+")
1967  from DQMOffline.Configuration.autoDQM import autoDQM
1968  from Validation.Configuration.autoValidation import autoValidation
1969  import copy
1970  combined_mapping = copy.deepcopy( autoDQM )
1971  combined_mapping.update( autoValidation )
1972  self.expandMapping(harvestingList,combined_mapping,index=-1)
1973 
1974  if len(set(harvestingList))!=len(harvestingList):
1975  harvestingList=list(set(harvestingList))
1976  print "Duplicate entries for HARVESTING, using",harvestingList
1977 
1978  for name in harvestingList:
1979  if not name in harvestingConfig.__dict__:
1980  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1981  continue
1982  harvestingstream = getattr(harvestingConfig,name)
1983  if isinstance(harvestingstream,cms.Path):
1984  self.schedule.append(harvestingstream)
1985  self.blacklist_paths.append(harvestingstream)
1986  if isinstance(harvestingstream,cms.Sequence):
1987  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1988  self.schedule.append(getattr(self.process,name+"_step"))
1989 
1990  self.scheduleSequence('DQMSaver','dqmsave_step')
1991  return
1992 
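
# A standalone sketch of how the harvesting alias table is assembled above; the two
# dictionaries are made-up stand-ins for autoDQM and autoValidation, whose entries are
# (DQM sequence, post-DQM sequence, harvesting sequence) style lists.
import copy

autoDQM_like = {'common': ['DQMOfflineCommon', 'PostDQMOffline', 'DQMHarvestCommon']}
autoValidation_like = {'baseValidation': ['preval', 'val', 'valHarvest']}
combined_mapping = copy.deepcopy(autoDQM_like)
combined_mapping.update(autoValidation_like)
# expandMapping(['@common', '@baseValidation'], combined_mapping, index=-1) keeps only the
# last (harvesting) entry of each alias: ['DQMHarvestCommon', 'valHarvest'].
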
1993  def prepare_ALCAHARVEST(self, sequence = None):
1994  """ Enrich the process with AlCaHarvesting step """
1995  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
1996  sequence=sequence.split(".")[-1]
1997 
1998  # decide which AlcaHARVESTING paths to use
1999  harvestingList = sequence.split("+")
2000 
2001 
2002 
2003  from Configuration.AlCa.autoPCL import autoPCL
2004  self.expandMapping(harvestingList,autoPCL)
2005 
2006  for name in harvestingConfig.__dict__:
2007  harvestingstream = getattr(harvestingConfig,name)
2008  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2009  self.schedule.append(harvestingstream)
2010  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2011  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2012  harvestingList.remove(name)
2013  # append the common part at the end of the sequence
2014  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2015  self.schedule.append(lastStep)
2016 
2017  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2018  print "The following harvesting could not be found : ", harvestingList
2019  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2020 
2021 
2022 
2023  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2024  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2025  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2026  return
2027 
2029  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2030  self.schedule.append(self.process.reconstruction)
2031 
2032 
2033  def build_production_info(self, evt_type, evtnumber):
2034  """ Add useful info for the production. """
2035  self.process.configurationMetadata=cms.untracked.PSet\
2036  (version=cms.untracked.string("$Revision: 1.19 $"),
2037  name=cms.untracked.string("Applications"),
2038  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2039  )
2040 
2041  self.addedObjects.append(("Production Info","configurationMetadata"))
2042 
2043 
2044  def prepare(self, doChecking = False):
2045  """ Prepare the configuration string and add missing pieces."""
2046 
2047  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2048  self.addMaxEvents()
2049  if self.with_input:
2050  self.addSource()
2051  self.addStandardSequences()
2052  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2053  self.completeInputCommand()
2054  self.addConditions()
2055 
2056 
2057  outputModuleCfgCode=""
2058  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2059  outputModuleCfgCode=self.addOutput()
2060 
2061  self.addCommon()
2062 
2063  self.pythonCfgCode = "# Auto generated configuration file\n"
2064  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2065  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2066  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2067  if hasattr(self._options,"era") and self._options.era :
2068  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2069  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
 2070  # Multiple eras can be specified in a comma separated list
2071  for requestedEra in self._options.era.split(",") :
2072  self.pythonCfgCode += ",eras."+requestedEra
2073  self.pythonCfgCode += ")\n\n" # end of the line
2074  else :
2075  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2076 
2077  self.pythonCfgCode += "# import of standard configurations\n"
2078  for module in self.imports:
2079  self.pythonCfgCode += ("process.load('"+module+"')\n")
2080 
2081  # production info
2082  if not hasattr(self.process,"configurationMetadata"):
2083  self.build_production_info(self._options.evt_type, self._options.number)
2084  else:
2085  #the PSet was added via a load
2086  self.addedObjects.append(("Production Info","configurationMetadata"))
2087 
2088  self.pythonCfgCode +="\n"
2089  for comment,object in self.addedObjects:
2090  if comment!="":
2091  self.pythonCfgCode += "\n# "+comment+"\n"
2092  self.pythonCfgCode += dumpPython(self.process,object)
2093 
2094  # dump the output definition
2095  self.pythonCfgCode += "\n# Output definition\n"
2096  self.pythonCfgCode += outputModuleCfgCode
2097 
2098  # dump all additional outputs (e.g. alca or skim streams)
2099  self.pythonCfgCode += "\n# Additional output definition\n"
2100  #I do not understand why the keys are not normally ordered.
2101  nl=self.additionalOutputs.keys()
2102  nl.sort()
2103  for name in nl:
2104  output = self.additionalOutputs[name]
2105  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2106  tmpOut = cms.EndPath(output)
2107  setattr(self.process,name+'OutPath',tmpOut)
2108  self.schedule.append(tmpOut)
2109 
2110  # dump all additional commands
2111  self.pythonCfgCode += "\n# Other statements\n"
2112  for command in self.additionalCommands:
2113  self.pythonCfgCode += command + "\n"
2114 
2115  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2116  for object in self._options.inlineObjets.split(','):
2117  if not object:
2118  continue
2119  if not hasattr(self.process,object):
2120  print 'cannot inline -'+object+'- : not known'
2121  else:
2122  self.pythonCfgCode +='\n'
2123  self.pythonCfgCode +=dumpPython(self.process,object)
2124 
2125  # dump all paths
2126  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2127  for path in self.process.paths:
2128  if getattr(self.process,path) not in self.blacklist_paths:
2129  self.pythonCfgCode += dumpPython(self.process,path)
2130 
2131  for endpath in self.process.endpaths:
2132  if getattr(self.process,endpath) not in self.blacklist_paths:
2133  self.pythonCfgCode += dumpPython(self.process,endpath)
2134 
2135  # dump the schedule
2136  self.pythonCfgCode += "\n# Schedule definition\n"
2137  result = "process.schedule = cms.Schedule("
2138 
2139  # handling of the schedule
2140  self.process.schedule = cms.Schedule()
2141  for item in self.schedule:
2142  if not isinstance(item, cms.Schedule):
2143  self.process.schedule.append(item)
2144  else:
2145  self.process.schedule.extend(item)
2146 
2147  if hasattr(self.process,"HLTSchedule"):
2148  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2149  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2150  pathNames = ['process.'+p.label_() for p in beforeHLT]
2151  result += ','.join(pathNames)+')\n'
2152  result += 'process.schedule.extend(process.HLTSchedule)\n'
2153  pathNames = ['process.'+p.label_() for p in afterHLT]
2154  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2155  else:
2156  pathNames = ['process.'+p.label_() for p in self.schedule]
2157  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2158 
2159  self.pythonCfgCode += result
2160 
2161  for labelToAssociate in self.labelsToAssociate:
2162  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2163  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2164 
2165  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2166  associatePatAlgosToolsTask(self.process)
2167  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2168  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2169 
 2170  if self._options.nThreads != "1":
2171  self.pythonCfgCode +="\n"
2172  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2173  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2174  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2175  #repacked version
2176  if self._options.isRepacked:
2177  self.pythonCfgCode +="\n"
2178  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2179  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2180  MassReplaceInputTag(self.process)
2181 
2182  # special treatment in case of production filter sequence 2/2
2183  if self.productionFilterSequence:
2184  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2185  self.pythonCfgCode +='for path in process.paths:\n'
2186  if len(self.conditionalPaths):
2187  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2188  if len(self.excludedPaths):
2189  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2190  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2191  pfs = getattr(self.process,self.productionFilterSequence)
2192  for path in self.process.paths:
2193  if not path in self.conditionalPaths: continue
2194  if path in self.excludedPaths: continue
2195  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2196 
2197 
2198  # dump customise fragment
2199  self.pythonCfgCode += self.addCustomise()
2200 
2201  if self._options.runUnscheduled:
2202  # prune and delete paths
2203  #this is not supporting the blacklist at this point since I do not understand it
2204  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2205  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2206  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2207 
2208  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2209  self.process=convertToUnscheduled(self.process)
2210 
2211  self.pythonCfgCode += self.addCustomise(1)
2212 
2213  self.pythonCfgCode += self.addCustomiseCmdLine()
2214 
2215  # Temporary hack to put the early delete customization after
2216  # everything else
2217  #
2218  # FIXME: remove when no longer needed
2219  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2220  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2221  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2222  self.pythonCfgCode += "# End adding early deletion\n"
2223  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2224  self.process = customiseEarlyDelete(self.process)
2225 
2226 
2227  # make the .io file
2228 
2229  if self._options.io:
2230  #io=open(self._options.python_filename.replace('.py','.io'),'w')
 2231  if not self._options.io.endswith('.io'): self._options.io+='.io'
2232  io=open(self._options.io,'w')
2233  ioJson={}
2234  if hasattr(self.process.source,"fileNames"):
2235  if len(self.process.source.fileNames.value()):
2236  ioJson['primary']=self.process.source.fileNames.value()
2237  if hasattr(self.process.source,"secondaryFileNames"):
2238  if len(self.process.source.secondaryFileNames.value()):
2239  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2240  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2241  ioJson['pileup']=self._options.pileup_input[4:]
2242  for (o,om) in self.process.outputModules_().items():
2243  ioJson[o]=om.fileName.value()
2244  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2245  if self.productionFilterSequence:
2246  ioJson['filter']=self.productionFilterSequence
2247  import json
2248  io.write(json.dumps(ioJson))
2249  return
2250 
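
# An illustrative example (all values made up) of the ioJson dictionary serialised to the
# .io file just above; the keyed output entries take the label of each output module.
ioJson = {
    'primary': ['file:step2.root'],
    'pileup': '/RelValMinBias/Example-GEN-SIM/GEN-SIM',
    'RECOSIMoutput': 'output.root',
    'GT': 'auto:run2_mc',
}
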