
ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 class Options:
19  pass
20 
21 # the canonical defaults
22 defaultOptions = Options()
23 defaultOptions.datamix = 'DataOnSim'
24 defaultOptions.isMC=False
25 defaultOptions.isData=True
26 defaultOptions.step=''
27 defaultOptions.pileup='NoPileUp'
28 defaultOptions.pileup_input = None
29 defaultOptions.pileup_dasoption = ''
30 defaultOptions.geometry = 'SimDB'
31 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
32 defaultOptions.magField = ''
33 defaultOptions.conditions = None
34 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
35 defaultOptions.harvesting= 'AtRunEnd'
36 defaultOptions.gflash = False
37 defaultOptions.number = -1
38 defaultOptions.number_out = None
39 defaultOptions.arguments = ""
40 defaultOptions.name = "NO NAME GIVEN"
41 defaultOptions.evt_type = ""
42 defaultOptions.filein = ""
43 defaultOptions.dasquery=""
44 defaultOptions.dasoption=""
45 defaultOptions.secondfilein = ""
46 defaultOptions.customisation_file = []
47 defaultOptions.customisation_file_unsch = []
48 defaultOptions.customise_commands = ""
49 defaultOptions.inline_custom=False
50 defaultOptions.particleTable = 'pythiapdt'
51 defaultOptions.particleTableList = ['pythiapdt','pdt']
52 defaultOptions.dirin = ''
53 defaultOptions.dirout = ''
54 defaultOptions.filetype = 'EDM'
55 defaultOptions.fileout = 'output.root'
56 defaultOptions.filtername = ''
57 defaultOptions.lazy_download = False
58 defaultOptions.custom_conditions = ''
59 defaultOptions.hltProcess = ''
60 defaultOptions.eventcontent = None
61 defaultOptions.datatier = None
62 defaultOptions.inlineEventContent = True
63 defaultOptions.inlineObjets =''
64 defaultOptions.hideGen=False
65 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
66 defaultOptions.beamspot=None
67 defaultOptions.outputDefinition =''
68 defaultOptions.inputCommands = None
69 defaultOptions.outputCommands = None
70 defaultOptions.inputEventContent = ''
71 defaultOptions.dropDescendant = False
72 defaultOptions.relval = None
73 defaultOptions.profile = None
74 defaultOptions.isRepacked = False
75 defaultOptions.restoreRNDSeeds = False
76 defaultOptions.donotDropOnInput = ''
77 defaultOptions.python_filename =''
78 defaultOptions.io=None
79 defaultOptions.lumiToProcess=None
80 defaultOptions.fast=False
81 defaultOptions.runsAndWeightsForMC = None
82 defaultOptions.runsScenarioForMC = None
83 defaultOptions.runUnscheduled = False
84 defaultOptions.timeoutOutput = False
85 defaultOptions.nThreads = '1'
86 
87 # some helper routines
88 def dumpPython(process,name):
89  theObject = getattr(process,name)
90  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
91  return "process."+name+" = " + theObject.dumpPython("process")
92  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
93  return "process."+name+" = " + theObject.dumpPython()+"\n"
94  else:
95  return "process."+name+" = " + theObject.dumpPython()+"\n"
96 def filesFromList(fileName,s=None):
97  import os
98  import FWCore.ParameterSet.Config as cms
99  prim=[]
100  sec=[]
101  for line in open(fileName,'r'):
102  if line.count(".root")>=2:
103  #two files solution...
104  entries=line.replace("\n","").split()
105  if not entries[0] in prim:
106  prim.append(entries[0])
107  if not entries[1] in sec:
108  sec.append(entries[1])
109  elif (line.find(".root")!=-1):
110  entry=line.replace("\n","")
111  if not entry in prim:
112  prim.append(entry)
113  if s:
114  if not hasattr(s,"fileNames"):
115  s.fileNames=cms.untracked.vstring(prim)
116  else:
117  s.fileNames.extend(prim)
118  if len(sec)!=0:
119  if not hasattr(s,"secondaryFileNames"):
120  s.secondaryFileNames=cms.untracked.vstring(sec)
121  else:
122  s.secondaryFileNames.extend(sec)
123  print "found files: ",prim
124  if len(prim)==0:
125  raise Exception("There are no files in input from the file list")
126  if len(sec)!=0:
127  print "found parent files:",sec
128  return (prim,sec)
129 
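The list file is read line by line: a line containing two .root names is taken as a primary/parent pair, a line with a single .root name as a primary file only, and duplicates are skipped. A minimal usage sketch, assuming a hypothetical list file inputs.txt:

    import FWCore.ParameterSet.Config as cms
    process = cms.Process("READ")
    process.source = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
    prim, sec = filesFromList("inputs.txt", process.source)  # fills fileNames (and secondaryFileNames if parents are listed)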
130 def filesFromDASQuery(query,option="",s=None):
131  import os,time
132  import FWCore.ParameterSet.Config as cms
133  prim=[]
134  sec=[]
135  print "the query is",query
136  eC=5
137  count=0
138  while eC!=0 and count<3:
139  if count!=0:
140  print 'Sleeping, then retrying DAS'
141  time.sleep(100)
142  p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
143  pipe=p.stdout.read()
144  tupleP = os.waitpid(p.pid, 0)
145  eC=tupleP[1]
146  count=count+1
147  if eC==0:
148  print "DAS succeeded after",count,"attempts",eC
149  else:
150  print "DAS failed 3 times - I give up"
151  for line in pipe.split('\n'):
152  if line.count(".root")>=2:
153  #two files solution...
154  entries=line.replace("\n","").split()
155  if not entries[0] in prim:
156  prim.append(entries[0])
157  if not entries[1] in sec:
158  sec.append(entries[1])
159  elif (line.find(".root")!=-1):
160  entry=line.replace("\n","")
161  if not entry in prim:
162  prim.append(entry)
163  if s:
164  if not hasattr(s,"fileNames"):
165  s.fileNames=cms.untracked.vstring(prim)
166  else:
167  s.fileNames.extend(prim)
168  if len(sec)!=0:
169  if not hasattr(s,"secondaryFileNames"):
170  s.secondaryFileNames=cms.untracked.vstring(sec)
171  else:
172  s.secondaryFileNames.extend(sec)
173  print "found files: ",prim
174  if len(sec)!=0:
175  print "found parent files:",sec
176  return (prim,sec)
177 
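The query string is handed verbatim to das_client and the call is retried up to three times before giving up; elsewhere in this file the query is always built as 'file dataset = &lt;dataset&gt;'. A short sketch with a placeholder dataset name:

    prim, sec = filesFromDASQuery('file dataset = /MyPrimary/MyProcessed/GEN-SIM')  # placeholder dataset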
178 def anyOf(listOfKeys,dict,opt=None):
179  for k in listOfKeys:
180  if k in dict:
181  toReturn=dict[k]
182  dict.pop(k)
183  return toReturn
184  if opt!=None:
185  return opt
186  else:
187  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of --output options")
188 
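anyOf pops the first matching alias from the dictionary, so each key of an --output entry is consumed exactly once and whatever is left over can later be flagged as unused. A small sketch:

    outDef = {'tier': 'RECO', 'e': 'RECOSIM'}
    theTier = anyOf(['t', 'tier', 'dataTier'], outDef)                                 # 'RECO'
    theStreamType = anyOf(['e', 'ec', 'eventContent', 'streamType'], outDef, theTier)  # 'RECOSIM'
    assert len(outDef) == 0  # every key has been consumed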
189 class ConfigBuilder(object):
190  """The main building routines """
191 
192  def __init__(self, options, process = None, with_output = False, with_input = False ):
193  """options taken from old cmsDriver and optparse """
194 
195  options.outfile_name = options.dirout+options.fileout
196 
197  self._options = options
198 
199  if self._options.isData and options.isMC:
200  raise Exception("ERROR: You may specify only --data or --mc, not both")
201  #if not self._options.conditions:
202  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
203 
204  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
205  if 'ENDJOB' in self._options.step:
206  if (hasattr(self._options,"outputDefinition") and \
207  self._options.outputDefinition != '' and \
208  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
209  (hasattr(self._options,"datatier") and \
210  self._options.datatier and \
211  'DQMIO' in self._options.datatier):
212  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
213  self._options.step=self._options.step.replace(',ENDJOB','')
214 
215 
216 
217  # what steps are provided by this class?
218  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
219  self.stepMap={}
220  self.stepKeys=[]
221  for step in self._options.step.split(","):
222  if step=='': continue
223  stepParts = step.split(":")
224  stepName = stepParts[0]
225  if stepName not in stepList and not stepName.startswith('re'):
226  raise ValueError("Step "+stepName+" unknown")
227  if len(stepParts)==1:
228  self.stepMap[stepName]=""
229  elif len(stepParts)==2:
230  self.stepMap[stepName]=stepParts[1].split('+')
231  elif len(stepParts)==3:
232  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
233  else:
234  raise ValueError("Step definition "+step+" invalid")
235  self.stepKeys.append(stepName)
236 
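 # For example, a step option like "RAW2DIGI,RECO:localreco+globalreco,DQM:@standardDQM"
 # would fill stepMap as {'RAW2DIGI': '', 'RECO': ['localreco', 'globalreco'], 'DQM': ['@standardDQM']},
 # while a three-part spec "STEP:my_cff:seqA+seqB" is stored as (['seqA', 'seqB'], 'my_cff').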
237  #print "map of steps is:",self.stepMap
238 
239  self.with_output = with_output
240  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
241  self.with_output = False
242  self.with_input = with_input
243  if process == None:
244  self.process = cms.Process(self._options.name)
245  else:
246  self.process = process
247  self.imports = []
248  self.define_Configs()
249  self.schedule = list()
250 
251  # we are doing two things here:
252  # creating a process to catch errors
253  # building the code to re-create the process
254 
255  self.additionalCommands = []
256  # TODO: maybe a list of to be dumped objects would help as well
257  self.blacklist_paths = []
258  self.addedObjects = []
259  self.additionalOutputs = {}
260 
261  self.productionFilterSequence = None
262  self.labelsToAssociate=[]
263  self.nextScheduleIsConditional=False
264  self.conditionalPaths=[]
265  self.excludedPaths=[]
266 
267  def profileOptions(self):
268  """
269  addIgProfService
270  Function to add the igprof profile service so that you can dump in the middle
271  of the run.
272  """
273  profileOpts = self._options.profile.split(':')
274  profilerStart = 1
275  profilerInterval = 100
276  profilerFormat = None
277  profilerJobFormat = None
278 
279  if len(profileOpts):
280  #type, given as first argument is unused here
281  profileOpts.pop(0)
282  if len(profileOpts):
283  startEvent = profileOpts.pop(0)
284  if not startEvent.isdigit():
285  raise Exception("%s is not a number" % startEvent)
286  profilerStart = int(startEvent)
287  if len(profileOpts):
288  eventInterval = profileOpts.pop(0)
289  if not eventInterval.isdigit():
290  raise Exception("%s is not a number" % eventInterval)
291  profilerInterval = int(eventInterval)
292  if len(profileOpts):
293  profilerFormat = profileOpts.pop(0)
294 
295 
296  if not profilerFormat:
297  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
298  self._options.step,
299  self._options.pileup,
300  self._options.conditions,
301  self._options.datatier,
302  self._options.profileTypeLabel)
303  if not profilerJobFormat and profilerFormat.endswith(".gz"):
304  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
305  elif not profilerJobFormat:
306  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
307 
308  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
309 
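The --profile value is colon-separated: profiler type (unused here), first event to report, event interval and, optionally, an explicit report file name; missing fields keep the defaults above and the file name is otherwise generated from the job options. A short sketch (the option string is hypothetical):

    # --profile pp:101:500:igprof_out.gz  would make profileOptions() return
    #   (101, 500, 'igprof_out.gz', 'igprof_out_EndOfJob.gz')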
310  def load(self,includeFile):
311  includeFile = includeFile.replace('/','.')
312  self.process.load(includeFile)
313  return sys.modules[includeFile]
314 
315  def loadAndRemember(self, includeFile):
316  """helper routine to load and memorize imports"""
317  # we could make the imports a on-the-fly data method of the process instance itself
318  # not sure if the latter is a good idea
319  includeFile = includeFile.replace('/','.')
320  self.imports.append(includeFile)
321  self.process.load(includeFile)
322  return sys.modules[includeFile]
323 
324  def executeAndRemember(self, command):
325  """helper routine to remember replace statements"""
326  self.additionalCommands.append(command)
327  if not command.strip().startswith("#"):
328  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
329  import re
330  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
331  #exec(command.replace("process.","self.process."))
332 
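executeAndRemember both applies the statement to the live process object (rewriting bare 'process.' references into 'self.process.') and records the original text so that it reappears verbatim in the dumped configuration. A minimal sketch, assuming a builder instance named myBuilder:

    myBuilder.executeAndRemember('process.MessageLogger.cerr.FwkReport.reportEvery = 100')
    # the statement has now been executed on myBuilder.process and appended to
    # myBuilder.additionalCommands, from which the python dump is assembled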
333  def addCommon(self):
334  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
335  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
336  else:
337  self.process.options = cms.untracked.PSet( )
338 
339  self.addedObjects.append(("","options"))
340 
341  if self._options.lazy_download:
342  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
343  stats = cms.untracked.bool(True),
344  enable = cms.untracked.bool(True),
345  cacheHint = cms.untracked.string("lazy-download"),
346  readHint = cms.untracked.string("read-ahead-buffered")
347  )
348  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
349 
350  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
351  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
352 
353  if self._options.profile:
354  (start, interval, eventFormat, jobFormat)=self.profileOptions()
355  self.process.IgProfService = cms.Service("IgProfService",
356  reportFirstEvent = cms.untracked.int32(start),
357  reportEventInterval = cms.untracked.int32(interval),
358  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
359  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
360  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
361 
362  def addMaxEvents(self):
363  """Here we decide how many evts will be processed"""
364  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
365  if self._options.number_out:
366  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
367  self.addedObjects.append(("","maxEvents"))
368 
369  def addSource(self):
370  """Here the source is built. Priority: file, generator"""
371  self.addedObjects.append(("Input source","source"))
372 
373  def filesFromOption(self):
374  for entry in self._options.filein.split(','):
375  print "entry",entry
376  if entry.startswith("filelist:"):
377  filesFromList(entry[9:],self.process.source)
378  elif entry.startswith("dbs:") or entry.startswith("das:"):
379  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
380  else:
381  self.process.source.fileNames.append(self._options.dirin+entry)
382  if self._options.secondfilein:
383  if not hasattr(self.process.source,"secondaryFileNames"):
384  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
385  for entry in self._options.secondfilein.split(','):
386  print "entry",entry
387  if entry.startswith("filelist:"):
388  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
389  elif entry.startswith("dbs:") or entry.startswith("das:"):
390  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
391  else:
392  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
393 
394  if self._options.filein or self._options.dasquery:
395  if self._options.filetype == "EDM":
396  self.process.source=cms.Source("PoolSource",
397  fileNames = cms.untracked.vstring(),
398  secondaryFileNames= cms.untracked.vstring())
399  filesFromOption(self)
400  elif self._options.filetype == "DAT":
401  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
402  filesFromOption(self)
403  elif self._options.filetype == "LHE":
404  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
405  if self._options.filein.startswith("lhe:"):
406  #list the article directory automatically
407  args=self._options.filein.split(':')
408  article=args[1]
409  print 'LHE input from article ',article
410  location='/store/lhe/'
411  import os
412  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
413  for line in textOfFiles:
414  for fileName in [x for x in line.split() if '.lhe' in x]:
415  self.process.source.fileNames.append(location+article+'/'+fileName)
416  #check first if list of LHE files is loaded (not empty)
417  if len(line)<2:
418  print 'Failed to load LHE files, please check and try again.'
419  sys.exit(-1)
420  if len(args)>2:
421  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
422  else:
423  filesFromOption(self)
424 
425 
426  elif self._options.filetype == "DQM":
427  self.process.source=cms.Source("DQMRootSource",
428  fileNames = cms.untracked.vstring())
429  filesFromOption(self)
430 
431  elif self._options.filetype == "DQMDAQ":
432  # FIXME: how to configure it if there are no input files specified?
433  self.process.source=cms.Source("DQMStreamerReader")
434 
435 
436  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
437  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
438 
439  if self._options.dasquery!='':
440  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
441  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
442 
443  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
444  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
445 
446  ##drop LHEXMLStringProduct on input to save memory if appropriate
447  if 'GEN' in self.stepMap.keys():
448  if self._options.inputCommands:
449  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
450  else:
451  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
452 
453  if self.process.source and self._options.inputCommands:
454  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
455  for command in self._options.inputCommands.split(','):
456  # remove whitespace around the keep/drop statements
457  command = command.strip()
458  if command=='': continue
459  self.process.source.inputCommands.append(command)
460  if not self._options.dropDescendant:
461  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
462 
463  if self._options.lumiToProcess:
464  import FWCore.PythonUtilities.LumiList as LumiList
465  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
466 
467  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
468  if self.process.source is None:
469  self.process.source=cms.Source("EmptySource")
470 
471  # modify source in case of run-dependent MC
472  self.runsAndWeights=None
473  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
474  if not self._options.isMC :
475  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
476  if self._options.runsAndWeightsForMC:
477  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
478  else:
479  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
480  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
481  __import__(RunsAndWeights[self._options.runsScenarioForMC])
482  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
483  else:
484  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
485 
486  if self.runsAndWeights:
487  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
488  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
489  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
490  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
491 
492  return
493 
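addSource dispatches on the prefix of every comma-separated --filein entry; in the sketch below, options stands for the parsed cmsDriver options object and the dataset and file names are placeholders:

    options.filein = 'file1.root,file2.root'               # plain files, each prefixed with --dirin
    options.filein = 'filelist:inputs.txt'                 # local text list parsed by filesFromList
    options.filein = 'das:/MyPrimary/MyProcessed/AODSIM'   # DAS query via filesFromDASQuery ("dbs:" is accepted too)
    options.filein = 'lhe:12345'                           # LHE article, listed with cmsLHEtoEOSManager.py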
494  def addOutput(self):
495  """ Add output module to the process """
496  result=""
497  if self._options.outputDefinition:
498  if self._options.datatier:
499  print "--datatier & --eventcontent options ignored"
500 
501  #new output convention with a list of dict
502  outList = eval(self._options.outputDefinition)
503  for (id,outDefDict) in enumerate(outList):
504  outDefDictStr=outDefDict.__str__()
505  if not isinstance(outDefDict,dict):
506  raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
507  #requires option: tier
508  theTier=anyOf(['t','tier','dataTier'],outDefDict)
509  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
510  ## event content
511  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
512  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
513  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
514  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
515  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
516  # module label has a particular role
517  if not theModuleLabel:
518  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
519  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
520  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
521  ]
522  for name in tryNames:
523  if not hasattr(self.process,name):
524  theModuleLabel=name
525  break
526  if not theModuleLabel:
527  raise Exception("cannot find a module label for specification: "+outDefDictStr)
528  if id==0:
529  defaultFileName=self._options.outfile_name
530  else:
531  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
532 
533  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
534  if not theFileName.endswith('.root'):
535  theFileName+='.root'
536 
537  if len(outDefDict.keys()):
538  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
539  if theStreamType=='DQMIO': theStreamType='DQM'
540  if theStreamType=='ALL':
541  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
542  else:
543  theEventContent = getattr(self.process, theStreamType+"EventContent")
544 
545 
546  addAlCaSelects=False
547  if theStreamType=='ALCARECO' and not theFilterName:
548  theFilterName='StreamALCACombined'
549  addAlCaSelects=True
550 
551  CppType='PoolOutputModule'
552  if self._options.timeoutOutput:
553  CppType='TimeoutPoolOutputModule'
554  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
555  output = cms.OutputModule(CppType,
556  theEventContent.clone(),
557  fileName = cms.untracked.string(theFileName),
558  dataset = cms.untracked.PSet(
559  dataTier = cms.untracked.string(theTier),
560  filterName = cms.untracked.string(theFilterName))
561  )
562  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
563  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
564  if not theSelectEvent and hasattr(self.process,'filtering_step'):
565  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
566  if theSelectEvent:
567  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
568 
569  if addAlCaSelects:
570  if not hasattr(output,'SelectEvents'):
571  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
572  for alca in self.AlCaPaths:
573  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
574 
575 
576  if hasattr(self.process,theModuleLabel):
577  raise Exception("the current process already has a module "+theModuleLabel+" defined")
578  #print "creating output module ",theModuleLabel
579  setattr(self.process,theModuleLabel,output)
580  outputModule=getattr(self.process,theModuleLabel)
581  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
582  path=getattr(self.process,theModuleLabel+'_step')
583  self.schedule.append(path)
584 
585  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
586  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
587  return label
588  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
589  if theExtraOutputCommands:
590  if not isinstance(theExtraOutputCommands,list):
591  raise Exception("extra output command in --output must be a list of strings")
592  if hasattr(self.process,theStreamType+"EventContent"):
593  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
594  else:
595  outputModule.outputCommands.extend(theExtraOutputCommands)
596 
597  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
598 
599  ##ends the --output options model
600  return result
601 
602  streamTypes=self._options.eventcontent.split(',')
603  tiers=self._options.datatier.split(',')
604  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
605  raise Exception("number of event content arguments does not match number of datatier arguments")
606 
607  # if the only step is alca we don't need to put in an output
608  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
609  return "\n"
610 
611  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
612  if streamType=='': continue
613  if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
614  if streamType=='DQMIO': streamType='DQM'
615  theEventContent = getattr(self.process, streamType+"EventContent")
616  if i==0:
617  theFileName=self._options.outfile_name
618  theFilterName=self._options.filtername
619  else:
620  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
621  theFilterName=self._options.filtername
622  CppType='PoolOutputModule'
623  if self._options.timeoutOutput:
624  CppType='TimeoutPoolOutputModule'
625  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
626  output = cms.OutputModule(CppType,
627  theEventContent,
628  fileName = cms.untracked.string(theFileName),
629  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
630  filterName = cms.untracked.string(theFilterName)
631  )
632  )
633  if hasattr(self.process,"generation_step") and streamType!='LHE':
634  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
635  if hasattr(self.process,"filtering_step"):
636  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
637 
638  if streamType=='ALCARECO':
639  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
640 
641  if "MINIAOD" in streamType:
642  output.dropMetaData = cms.untracked.string('ALL')
643  output.fastCloning= cms.untracked.bool(False)
644  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
645 
646  outputModuleName=streamType+'output'
647  setattr(self.process,outputModuleName,output)
648  outputModule=getattr(self.process,outputModuleName)
649  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
650  path=getattr(self.process,outputModuleName+'_step')
651  self.schedule.append(path)
652 
653  if self._options.outputCommands and streamType!='DQM':
654  for evct in self._options.outputCommands.split(','):
655  if not evct: continue
656  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
657 
658  if not self._options.inlineEventContent:
659  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
660  return label
661  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
662 
663  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
664 
665  return result
666 
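In the --output model the option is eval'd into a list of dictionaries, one per output module; only the data tier is mandatory and the short aliases are resolved by anyOf above. A sketch of a possible value (file names are placeholders):

    options.outputDefinition = str([
        {'tier': 'RECO', 'eventContent': 'RECOSIM', 'fileName': 'reco.root'},
        {'t': 'DQMIO', 'e': 'DQM'},
    ])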
667  def addStandardSequences(self):
668  """
669  Add selected standard sequences to the process
670  """
671  # load the pile up file
672  if self._options.pileup:
673  pileupSpec=self._options.pileup.split(',')[0]
674 
675  # Does the requested pile-up scenario exist?
676  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
677  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
678  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
679  raise Exception(message)
680 
681  # Put mixing parameters in a dictionary
682  if '.' in pileupSpec:
683  mixingDict={'file':pileupSpec}
684  elif pileupSpec.startswith('file:'):
685  mixingDict={'file':pileupSpec[5:]}
686  else:
687  import copy
688  mixingDict=copy.copy(Mixing[pileupSpec])
689  if len(self._options.pileup.split(','))>1:
690  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
691 
692  # Load the pu cfg file corresponding to the requested pu scenario
693  if 'file:' in pileupSpec:
694  #the file is local
695  self.process.load(mixingDict['file'])
696  print "inlining mixing module configuration"
697  self._options.inlineObjets+=',mix'
698  else:
699  self.loadAndRemember(mixingDict['file'])
700 
701  mixingDict.pop('file')
702  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
703  if self._options.pileup_input:
704  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
705  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
706  elif self._options.pileup_input.startswith("filelist:"):
707  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
708  else:
709  mixingDict['F']=self._options.pileup_input.split(',')
710  specialization=defineMixing(mixingDict)
711  for command in specialization:
712  self.executeAndRemember(command)
713  if len(mixingDict)!=0:
714  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
715 
716 
717  # load the geometry file
718  try:
719  if len(self.stepMap):
720  self.loadAndRemember(self.GeometryCFF)
721  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
722  self.loadAndRemember(self.SimGeometryCFF)
723  if self.geometryDBLabel:
724  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
725  except ImportError:
726  print "Geometry option",self._options.geometry,"unknown."
727  raise
728 
729  if len(self.stepMap):
730  self.loadAndRemember(self.magFieldCFF)
731 
732  for stepName in self.stepKeys:
733  stepSpec = self.stepMap[stepName]
734  print "Step:", stepName,"Spec:",stepSpec
735  if stepName.startswith('re'):
736  ##add the corresponding input content
737  if stepName[2:] not in self._options.donotDropOnInput:
738  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
739  stepName=stepName[2:]
740  if stepSpec=="":
741  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
742  elif type(stepSpec)==list:
743  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
744  elif type(stepSpec)==tuple:
745  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
746  else:
747  raise ValueError("Invalid step definition")
748 
749  if self._options.restoreRNDSeeds!=False:
750  #it is either True, or a process name
751  if self._options.restoreRNDSeeds==True:
752  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
753  else:
754  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
755  if self._options.inputEventContent or self._options.inputCommands:
756  if self._options.inputCommands:
757  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
758  else:
759  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
760 
761 
763  if self._options.inputEventContent:
764  import copy
765  def dropSecondDropStar(iec):
766  #drop duplicate occurrences of 'drop *' from the list, keeping only the first
767  count=0
768  for item in iec:
769  if item=='drop *':
770  if count!=0:
771  iec.remove(item)
772  count+=1
773 
774 
775  ## allow comma separated input eventcontent
776  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
777  for evct in self._options.inputEventContent.split(','):
778  if evct=='': continue
779  theEventContent = getattr(self.process, evct+"EventContent")
780  if hasattr(theEventContent,'outputCommands'):
781  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
782  if hasattr(theEventContent,'inputCommands'):
783  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
784 
785  dropSecondDropStar(self.process.source.inputCommands)
786 
787  if not self._options.dropDescendant:
788  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
789 
790 
791  return
792 
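The --pileup value is either a key of Configuration.StandardSequences.Mixing or the (dotted) name of a mixing cff to load directly; anything after the first comma is eval'd as a dict of parameter overrides merged into the chosen scenario, and --pileup_input supplies the mixing input files. A sketch with placeholder names:

    options.pileup = 'NoPileUp'                                # a key of the Mixing dictionary
    options.pileup = 'SimGeneral.MixingModule.mix_MyPU_cfi'    # hypothetical: a dotted path is loaded as-is
    options.pileup_input = 'das:/MinBias/MySample/GEN-SIM'     # placeholder dataset for the mixing input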
793  def addConditions(self):
794  """Add conditions to the process"""
795  if not self._options.conditions: return
796 
797  if 'FrontierConditions_GlobalTag' in self._options.conditions:
798  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
799  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
800 
801  self.loadAndRemember(self.ConditionsDefaultCFF)
802  from Configuration.AlCa.GlobalTag import GlobalTag
803  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
804  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
805  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
806 
807 
808  def addCustomise(self,unsch=0):
809  """Include the customise code """
810 
811  custOpt=[]
812  if unsch==0:
813  for c in self._options.customisation_file:
814  custOpt.extend(c.split(","))
815  else:
816  for c in self._options.customisation_file_unsch:
817  custOpt.extend(c.split(","))
818 
819  custMap=DictTypes.SortedKeysDict()
820  for opt in custOpt:
821  if opt=='': continue
822  if opt.count('.')>1:
823  raise Exception("more than one '.' in the specification: "+opt)
824  fileName=opt.split('.')[0]
825  if opt.count('.')==0: rest='customise'
826  else:
827  rest=opt.split('.')[1]
828  if rest=='py': rest='customise' #catch the case of --customise file.py
829 
830  if fileName in custMap:
831  custMap[fileName].extend(rest.split('+'))
832  else:
833  custMap[fileName]=rest.split('+')
834 
835  if len(custMap)==0:
836  final_snippet='\n'
837  else:
838  final_snippet='\n# customisation of the process.\n'
839 
840  allFcn=[]
841  for opt in custMap:
842  allFcn.extend(custMap[opt])
843  for fcn in allFcn:
844  if allFcn.count(fcn)!=1:
845  raise Exception("cannot specify "+fcn+" twice as a customisation method")
846 
847  for f in custMap:
848  # let python search for that package and do syntax checking at the same time
849  packageName = f.replace(".py","").replace("/",".")
850  __import__(packageName)
851  package = sys.modules[packageName]
852 
853  # now ask the package for its definition and pick .py instead of .pyc
854  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
855 
856  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
857  if self._options.inline_custom:
858  for line in file(customiseFile,'r'):
859  if "import FWCore.ParameterSet.Config" in line:
860  continue
861  final_snippet += line
862  else:
863  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
864  for fcn in custMap[f]:
865  print "customising the process with",fcn,"from",f
866  if not hasattr(package,fcn):
867  #bound to fail at run time
868  raise Exception("config "+f+" has no function "+fcn)
869  #execute the command
870  self.process=getattr(package,fcn)(self.process)
871  #and print it in the configuration
872  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
873  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
874 
875  if len(custMap)!=0:
876  final_snippet += '\n# End of customisation functions\n'
877 
878  ### now for a useful command
879  return final_snippet
880 
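Each --customise entry names a file (or package path) and, after a single dot, one or more '+'-separated functions to apply; a bare 'file.py' defaults to a function called customise. A sketch with placeholder module and function names:

    options.customisation_file = ['MyPkg/MyTools/customise_my_cff.addMonitoring+tweakOutput']
    # imports MyPkg.MyTools.customise_my_cff and applies, in order,
    #   process = addMonitoring(process)
    #   process = tweakOutput(process)
    options.customisation_file = ['MyPkg/MyTools/customise_my_cff.py']   # runs its customise(process)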
881  def addCustomiseCmdLine(self):
882  final_snippet='\n# Customisation from command line\n'
883  if self._options.customise_commands:
884  import string
885  for com in self._options.customise_commands.split('\\n'):
886  com=string.lstrip(com)
887  self.executeAndRemember(com)
888  final_snippet +='\n'+com
889 
890  return final_snippet
891 
892  #----------------------------------------------------------------------------
893  # here the methods to define the python includes for each step or
894  # conditions
895  #----------------------------------------------------------------------------
896  def define_Configs(self):
897  if len(self.stepMap):
898  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
899  if self._options.particleTable not in defaultOptions.particleTableList:
900  print 'Invalid particle table provided. Options are:'
901  print defaultOptions.particleTableList
902  sys.exit(-1)
903  else:
904  if len(self.stepMap):
905  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
906 
907  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
908 
909  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
910  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
911  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
912  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
913  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
914  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
915  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
916  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
917  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
918  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
919  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
920  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
921  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
922  self.EIDefaultCFF=None
923  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
924  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
925  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
926  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
927  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
928  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
929  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
930  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
931  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
932  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
933  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
934 
935  if "DATAMIX" in self.stepMap.keys():
936  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
937  if self._options.datamix == 'PreMix':
938  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
939  else:
940  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
941  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
942  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
943 
944  if "DIGIPREMIX" in self.stepMap.keys():
945  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
946  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawPreMixing_cff"
947  self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"
948 
949  self.ALCADefaultSeq=None
950  self.LHEDefaultSeq='externalLHEProducer'
951  self.GENDefaultSeq='pgen'
952  self.SIMDefaultSeq='psim'
953  self.DIGIDefaultSeq='pdigi'
954  self.DIGIPREMIXDefaultSeq='pdigi'
955  self.DIGIPREMIX_S2DefaultSeq='pdigi'
956  self.DATAMIXDefaultSeq=None
957  self.DIGI2RAWDefaultSeq='DigiToRaw'
958  self.HLTDefaultSeq='GRun'
959  self.L1DefaultSeq=None
960  self.L1REPACKDefaultSeq='GT'
961  self.HARVESTINGDefaultSeq=None
962  self.ALCAHARVESTDefaultSeq=None
963  self.CFWRITERDefaultSeq=None
964  self.RAW2DIGIDefaultSeq='RawToDigi'
965  self.L1RecoDefaultSeq='L1Reco'
966  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
967  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
968  self.RECODefaultSeq='reconstruction'
969  else:
970  self.RECODefaultSeq='reconstruction_fromRECO'
971 
972  self.EIDefaultSeq='top'
973  self.POSTRECODefaultSeq=None
974  self.L1HwValDefaultSeq='L1HwVal'
975  self.DQMDefaultSeq='DQMOffline'
976  self.VALIDATIONDefaultSeq=''
977  self.ENDJOBDefaultSeq='endOfProcess'
978  self.REPACKDefaultSeq='DigiToRawRepack'
979  self.PATDefaultSeq='miniAOD'
980 
981  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
982 
983  if not self._options.beamspot:
984  self._options.beamspot=VtxSmearedDefaultKey
985 
986  # if it's MC then change the raw2digi
987  if self._options.isMC==True:
988  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
989  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
990  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
991  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
992  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
993  else:
994  self._options.beamspot = None
995 
996  #patch for gen, due to backward incompatibility
997  if 'reGEN' in self.stepMap:
998  self.GENDefaultSeq='fixGenInfo'
999 
1000  if self._options.scenario=='cosmics':
1001  self._options.pileup='Cosmics'
1002  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1003  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1004  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1005  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1006  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1007  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1008  if self._options.isMC==True:
1009  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1010  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1011  self.RECODefaultSeq='reconstructionCosmics'
1012  self.DQMDefaultSeq='DQMOfflineCosmics'
1013 
1014  if self._options.scenario=='HeavyIons':
1015  if not self._options.beamspot:
1016  self._options.beamspot=VtxSmearedHIDefaultKey
1017  self.HLTDefaultSeq = 'HIon'
1018  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1019  self.VALIDATIONDefaultSeq=''
1020  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1021  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1022  self.RECODefaultSeq='reconstructionHeavyIons'
1023  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1024  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1025  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1026  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1027  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1028  if self._options.isMC==True:
1029  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1030 
1031 
1032  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1033 
1034  self.USERDefaultSeq='user'
1035  self.USERDefaultCFF=None
1036 
1037  # the magnetic field
1038  if self._options.isData:
1039  if self._options.magField==defaultOptions.magField:
1040  print "magnetic field option forced to: AutoFromDBCurrent"
1041  self._options.magField='AutoFromDBCurrent'
1042  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1043  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1044 
1045  # the geometry
1046  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1047  self.geometryDBLabel=None
1048  simGeometry=''
1049  if self._options.fast:
1050  if 'start' in self._options.conditions.lower():
1051  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1052  else:
1053  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1054  else:
1055  def inGeometryKeys(opt):
1056  from Configuration.StandardSequences.GeometryConf import GeometryConf
1057  if opt in GeometryConf:
1058  return GeometryConf[opt]
1059  else:
1060  return opt
1061 
1062  geoms=self._options.geometry.split(',')
1063  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1064  if len(geoms)==2:
1065  #may specify the reco geometry
1066  if '/' in geoms[1] or '_cff' in geoms[1]:
1067  self.GeometryCFF=geoms[1]
1068  else:
1069  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1070 
1071  if (geoms[0].startswith('DB:')):
1072  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1073  self.geometryDBLabel=geoms[0][3:]
1074  print "with DB:"
1075  else:
1076  if '/' in geoms[0] or '_cff' in geoms[0]:
1077  self.SimGeometryCFF=geoms[0]
1078  else:
1079  simGeometry=geoms[0]
1080  if self._options.gflash==True:
1081  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1082  else:
1083  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1084 
1085  # synchronize the geometry configuration and the FullSimulation sequence to be used
1086  if simGeometry not in defaultOptions.geometryExtendedOptions:
1087  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1088 
1089  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1090  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1091  self._options.beamspot='NoSmear'
1092 
1093  # fastsim requires some changes to the default cff files and sequences
1094  if self._options.fast:
1095  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1096  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1097  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1098  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1099  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1100 
1101  # Mixing
1102  if self._options.pileup=='default':
1103  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1104  self._options.pileup=MixingDefaultKey
1105 
1106 
1107  #not driven by a default cff anymore
1108  if self._options.isData:
1109  self._options.pileup=None
1110 
1111 
1112  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1113 
1114  # for alca, skims, etc
1115  def addExtraStream(self,name,stream,workflow='full'):
1116  # define output module and go from there
1117  output = cms.OutputModule("PoolOutputModule")
1118  if stream.selectEvents.parameters_().__len__()!=0:
1119  output.SelectEvents = stream.selectEvents
1120  else:
1121  output.SelectEvents = cms.untracked.PSet()
1122  output.SelectEvents.SelectEvents=cms.vstring()
1123  if isinstance(stream.paths,tuple):
1124  for path in stream.paths:
1125  output.SelectEvents.SelectEvents.append(path.label())
1126  else:
1127  output.SelectEvents.SelectEvents.append(stream.paths.label())
1128 
1129 
1130 
1131  if isinstance(stream.content,str):
1132  evtPset=getattr(self.process,stream.content)
1133  for p in evtPset.parameters_():
1134  setattr(output,p,getattr(evtPset,p))
1135  if not self._options.inlineEventContent:
1136  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1137  return label
1138  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1139  else:
1140  output.outputCommands = stream.content
1141 
1142 
1143  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1144 
1145  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1146  filterName = cms.untracked.string(stream.name))
1147 
1148  if self._options.filtername:
1149  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1150 
1151  #add an automatic flushing to limit memory consumption
1152  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1153 
1154  if workflow in ("producers","full"):
1155  if isinstance(stream.paths,tuple):
1156  for path in stream.paths:
1157  self.schedule.append(path)
1158  else:
1159  self.schedule.append(stream.paths)
1160 
1161 
1162  # in case of relvals we don't want to have additional outputs
1163  if (not self._options.relval) and workflow in ("full","output"):
1164  self.additionalOutputs[name] = output
1165  setattr(self.process,name,output)
1166 
1167  if workflow == 'output':
1168  # adjust the select events to the proper trigger results from previous process
1169  filterList = output.SelectEvents.SelectEvents
1170  for i, filter in enumerate(filterList):
1171  filterList[i] = filter+":"+self._options.triggerResultsProcess
1172 
1173  return output
1174 
1175  #----------------------------------------------------------------------------
1176  # here the methods to create the steps. Of course we are doing magic here ;)
1177  # prepare_STEPNAME modifies self.process and what else's needed.
1178  #----------------------------------------------------------------------------
1179 
1180  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1181  if ( len(sequence.split('.'))==1 ):
1182  l=self.loadAndRemember(defaultCFF)
1183  elif ( len(sequence.split('.'))==2 ):
1184  l=self.loadAndRemember(sequence.split('.')[0])
1185  sequence=sequence.split('.')[1]
1186  else:
1187  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1188  print sequence,"not recognized"
1189  raise Exception("Invalid sequence specification: "+sequence)
1190  return l
1191 
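A sequence spec without a dot keeps the step's default cff; with one dot, the part before it names the cff to load instead and the part after it the sequences to schedule. Placeholder names:

    # sequence = "reconstruction"              -> loads defaultCFF, schedules "reconstruction"
    # sequence = "MyPkg/MySeqs_cff.seqA+seqB"  -> loads MyPkg/MySeqs_cff, schedules "seqA+seqB"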
1192  def scheduleSequence(self,seq,prefix,what='Path'):
1193  if '*' in seq:
1194  #create only one path with all sequences in it
1195  for i,s in enumerate(seq.split('*')):
1196  if i==0:
1197  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1198  else:
1199  p=getattr(self.process,prefix)
1200  p+=getattr(self.process, s)
1201  self.schedule.append(getattr(self.process,prefix))
1202  return
1203  else:
1204  #create as many path as many sequences
1205  if not '+' in seq:
1206  if self.nextScheduleIsConditional:
1207  self.conditionalPaths.append(prefix)
1208  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1209  self.schedule.append(getattr(self.process,prefix))
1210  else:
1211  for i,s in enumerate(seq.split('+')):
1212  sn=prefix+'%d'%(i)
1213  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1214  self.schedule.append(getattr(self.process,sn))
1215  return
1216 
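The separator chooses the path layout: '*' concatenates everything into a single path named after the prefix, while '+' creates one numbered path per sequence. Placeholder sequence names:

    # scheduleSequence('seqA*seqB', 'reco_step')  -> one cms.Path "reco_step" running seqA then seqB
    # scheduleSequence('seqA+seqB', 'reco_step')  -> two paths, "reco_step0" (seqA) and "reco_step1" (seqB)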
1217  def scheduleSequenceAtEnd(self,seq,prefix):
1218  self.scheduleSequence(seq,prefix,what='EndPath')
1219  return
1220 
1221  def prepare_ALCAPRODUCER(self, sequence = None):
1222  self.prepare_ALCA(sequence, workflow = "producers")
1223 
1224  def prepare_ALCAOUTPUT(self, sequence = None):
1225  self.prepare_ALCA(sequence, workflow = "output")
1226 
1227  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1228  """ Enrich the process with alca streams """
1229  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1230  sequence = sequence.split('.')[-1]
1231 
1232  # decide which ALCA paths to use
1233  alcaList = sequence.split("+")
1234  maxLevel=0
1235  from Configuration.AlCa.autoAlca import autoAlca
1236  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1237  self.expandMapping(alcaList,autoAlca)
1238  self.AlCaPaths=[]
1239  for name in alcaConfig.__dict__:
1240  alcastream = getattr(alcaConfig,name)
1241  shortName = name.replace('ALCARECOStream','')
1242  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1243  output = self.addExtraStream(name,alcastream, workflow = workflow)
1244  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1245  self.AlCaPaths.append(shortName)
1246  if 'DQM' in alcaList:
1247  if not self._options.inlineEventContent and hasattr(self.process,name):
1248  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1249  else:
1250  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1251 
1252  #rename the HLT process name in the alca modules
1253  if self._options.hltProcess or 'HLT' in self.stepMap:
1254  if isinstance(alcastream.paths,tuple):
1255  for path in alcastream.paths:
1256  self.renameHLTprocessInSequence(path.label())
1257  else:
1258  self.renameHLTprocessInSequence(alcastream.paths.label())
1259 
1260  for i in range(alcaList.count(shortName)):
1261  alcaList.remove(shortName)
1262 
1263  # DQM needs a special handling
1264  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1265  path = getattr(alcaConfig,name)
1266  self.schedule.append(path)
1267  alcaList.remove('DQM')
1268 
1269  if isinstance(alcastream,cms.Path):
1270  #black list the alca path so that they do not appear in the cfg
1271  self.blacklist_paths.append(alcastream)
1272 
1273 
1274  if len(alcaList) != 0:
1275  available=[]
1276  for name in alcaConfig.__dict__:
1277  alcastream = getattr(alcaConfig,name)
1278  if isinstance(alcastream,cms.FilteredStream):
1279  available.append(name.replace('ALCARECOStream',''))
1280  print "The following alcas could not be found "+str(alcaList)
1281  print "available ",available
1282  #print "verify your configuration, ignoring for now"
1283  raise Exception("The following alcas could not be found "+str(alcaList))
1284 
1285  def prepare_LHE(self, sequence = None):
1286  #load the fragment
1287  ##make it loadable
1288  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1289  print "Loading lhe fragment from",loadFragment
1290  __import__(loadFragment)
1291  self.process.load(loadFragment)
1292  ##inline the modules
1293  self._options.inlineObjets+=','+sequence
1294 
1295  getattr(self.process,sequence).nEvents = int(self._options.number)
1296 
1297  #schedule it
1298  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1299  self.excludedPaths.append("lhe_step")
1300  self.schedule.append( self.process.lhe_step )
1301 
1302  def prepare_GEN(self, sequence = None):
1303  """ load the fragment of generator configuration """
1304  loadFailure=False
1305  #remove trailing .py
1306  #support old style .cfi by changing into something.cfi into something_cfi
1307  #remove python/ from the name
1308  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1309  #standard location of fragments
1310  if not '/' in loadFragment:
1311  loadFragment='Configuration.Generator.'+loadFragment
1312  else:
1313  loadFragment=loadFragment.replace('/','.')
1314  try:
1315  print "Loading generator fragment from",loadFragment
1316  __import__(loadFragment)
1317  except:
1318  loadFailure=True
1319  #if self.process.source and self.process.source.type_()=='EmptySource':
1320  if not (self._options.filein or self._options.dasquery):
1321  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1322 
1323  if not loadFailure:
1324  generatorModule=sys.modules[loadFragment]
1325  genModules=generatorModule.__dict__
1326  #remove lhe producer module since this should have been
1327  #imported instead in the LHE step
1328  if self.LHEDefaultSeq in genModules:
1329  del genModules[self.LHEDefaultSeq]
1330 
1331  if self._options.hideGen:
1332  self.loadAndRemember(loadFragment)
1333  else:
1334  self.process.load(loadFragment)
1335  # expose the objects from that fragment to the configuration
1336  import FWCore.ParameterSet.Modules as cmstypes
1337  for name in genModules:
1338  theObject = getattr(generatorModule,name)
1339  if isinstance(theObject, cmstypes._Module):
1340  self._options.inlineObjets=name+','+self._options.inlineObjets
1341  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1342  self._options.inlineObjets+=','+name
1343 
1344  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1345  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1346  self.productionFilterSequence = 'ProductionFilterSequence'
1347  elif 'generator' in genModules:
1348  self.productionFilterSequence = 'generator'
1349 
1350  """ Enrich the schedule with the rest of the generation step """
1351  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1352  genSeqName=sequence.split('.')[-1]
1353 
1354  if True:
1355  try:
1356  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1357  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1358  self.loadAndRemember(cffToBeLoaded)
1359  except ImportError:
1360  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1361 
1362  if self._options.scenario == 'HeavyIons':
1363  if self._options.pileup=='HiMixGEN':
1364  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1365  else:
1366  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1367 
1368  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1369  self.schedule.append(self.process.generation_step)
1370 
1371  #register to the genstepfilter the name of the path (static right now, but might evolve)
1372  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1373 
1374  if 'reGEN' in self.stepMap:
1375  #stop here
1376  return
1377 
1378  """ Enrich the schedule with the summary of the filter step """
1379  #the gen filter in the endpath
1380  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1381  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1382  return
1383 
1384  def prepare_SIM(self, sequence = None):
1385  """ Enrich the schedule with the simulation step"""
1386  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1387  if not self._options.fast:
1388  if self._options.gflash==True:
1389  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1390 
1391  if self._options.magField=='0T':
1392  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1393  else:
1394  if self._options.magField=='0T':
1395  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1396 
1397  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1398  return
1399 
1400  def prepare_DIGI(self, sequence = None):
1401  """ Enrich the schedule with the digitisation step"""
1402  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1403 
1404  if self._options.gflash==True:
1405  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1406 
1407  if sequence == 'pdigi_valid':
1408  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1409 
1410  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1411  if self._options.inputEventContent=='':
1412  self._options.inputEventContent='REGEN'
1413  else:
1414  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1415 
1416 
1417  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1418  return
1419 
1420  def prepare_DIGIPREMIX(self, sequence = None):
1421  """ Enrich the schedule with the premixing digitisation step"""
1422  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1423 
1424  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1425 
1426  if sequence == 'pdigi_valid':
1427  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1428  else:
1429  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1430 
1431  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1432  return
1433 
1434  def prepare_DIGIPREMIX_S2(self, sequence = None):
1435  """ Enrich the schedule with the second stage of the premixing digitisation"""
1436  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1437 
1438  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1439 
1440 
1441  if sequence == 'pdigi_valid':
1442  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1443  else:
1444  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1445 
1446  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1447  return
1448 
1449  def prepare_CFWRITER(self, sequence = None):
1450  """ Enrich the schedule with the crossing frame writer step"""
1451  self.loadAndRemember(self.CFWRITERDefaultCFF)
1452  self.scheduleSequence('pcfw','cfwriter_step')
1453  return
1454 
1455  def prepare_DATAMIX(self, sequence = None):
1456  """ Enrich the schedule with the data mixing step"""
1457  self.loadAndRemember(self.DATAMIXDefaultCFF)
1458  self.scheduleSequence('pdatamix','datamixing_step')
1459 
1460  if self._options.pileup_input:
1461  theFiles=''
1462  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1463  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1464  elif self._options.pileup_input.startswith("filelist:"):
1465  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1466  else:
1467  theFiles=self._options.pileup_input.split(',')
1468  #print theFiles
1469  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1470 
1471  return
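# Illustrative sketch, not part of the original file: --pileup_input accepts the three
# forms handled above, e.g. (assumed example values)
#   'das:/RelValMinBias/sample/GEN-SIM-DIGI-RAW'  -> file list resolved via a DAS query
#   'filelist:pileup_files.txt'                   -> file list read from a local text file
#   'file:a.root,file:b.root'                     -> comma-separated list used as given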
1472 
1473  def prepare_DIGI2RAW(self, sequence = None):
1474  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1475  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1476  if "DIGIPREMIX" in self.stepMap.keys():
1477  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1478  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1479 
1480  return
1481 
1482  def prepare_REPACK(self, sequence = None):
1483  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1484  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1485  return
1486 
1487  def prepare_L1(self, sequence = None):
1488  """ Enrich the schedule with the L1 simulation step"""
1489  assert(sequence == None)
1490  self.loadAndRemember(self.L1EMDefaultCFF)
1491  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1492  return
1493 
1494  def prepare_L1REPACK(self, sequence = None):
1495  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1496  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1497  if sequence in supported:
1498  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1499  if self._options.scenario == 'HeavyIons':
1500  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1501  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1502  else:
1503  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1504  raise Exception('unsupported feature')
1505 
1506 
1507  def prepare_HLT(self, sequence = None):
1508  """ Enrich the schedule with the HLT simulation step"""
1509  if not sequence:
1510  print "no specification of the hlt menu has been given, should never happen"
1511  raise Exception('no HLT sequence provided')
1512 
1513  if '@' in sequence:
1514  # case where HLT:@something was provided
1515  from Configuration.HLT.autoHLT import autoHLT
1516  key = sequence[1:]
1517  if key in autoHLT:
1518  sequence = autoHLT[key]
1519  else:
1520  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1521 
1522  if ',' in sequence:
1523  #case where HLT:something:something was provided
1524  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1525  optionsForHLT = {}
1526  if self._options.scenario == 'HeavyIons':
1527  optionsForHLT['type'] = 'HIon'
1528  else:
1529  optionsForHLT['type'] = 'GRun'
1530  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1531  if sequence == 'run,fromSource':
1532  if hasattr(self.process.source,'firstRun'):
1533  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1534  elif hasattr(self.process.source,'setRunNumber'):
1535  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1536  else:
1537  raise Exception('Cannot replace menu to load %s'%(sequence))
1538  else:
1539  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1540  else:
1541  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1542 
1543  if self._options.isMC:
1544  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1545 
1546  if self._options.name != 'HLT':
1547  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1548  self.additionalCommands.append('process = ProcessName(process)')
1549  self.additionalCommands.append('')
1550  from HLTrigger.Configuration.CustomConfigs import ProcessName
1551  self.process = ProcessName(self.process)
1552 
1553  self.schedule.append(self.process.HLTSchedule)
1554  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1555 
1556  #this is a fake, to be removed with the fastsim migration and HLT menu dump
1557  if self._options.fast:
1558  if not hasattr(self.process,'HLTEndSequence'):
1559  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1560 
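# Illustrative sketch, not part of the original file: an HLT step given as 'HLT:@relval'
# is resolved through the autoHLT mapping before any menu is loaded, roughly
#   from Configuration.HLT.autoHLT import autoHLT
#   sequence = autoHLT['relval']   # assumed key; yields a menu name such as 'GRun'
# after which HLTrigger/Configuration/HLT_<menu>_cff is loaded as in the last branch above.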
1561 
1562  def prepare_RAW2RECO(self, sequence = None):
1563  if ',' in sequence:
1564  seqReco=sequence.split(',')[1]
1565  seqDigi=sequence.split(',')[0]
1566  else:
1567  print "RAW2RECO requires two specifications",sequence,"insufficient"
1568 
1569  self.prepare_RAW2DIGI(seqDigi)
1570  self.prepare_RECO(seqReco)
1571  return
1572 
1573  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1574  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1575  self.scheduleSequence(sequence,'raw2digi_step')
1576  # if self._options.isRepacked:
1577  #self.renameInputTagsInSequence(sequence)
1578  return
1579 
1580  def prepare_PATFILTER(self, sequence=None):
1581  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1582  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1583  for filt in allMetFilterPaths:
1584  self.schedule.append(getattr(self.process,'Flag_'+filt))
1585 
1586  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1587  ''' Enrich the schedule with L1 HW validation '''
1588  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1589  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1590  print '\n\n\n DEPRECATED this has no action \n\n\n'
1591  return
1592 
1593  def prepare_L1Reco(self, sequence = "L1Reco"):
1594  ''' Enrich the schedule with L1 reconstruction '''
1595  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1596  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1597  return
1598 
1599  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1600  ''' Enrich the schedule with the L1 track trigger '''
1601  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1602  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1603  return
1604 
1605  def prepare_FILTER(self, sequence = None):
1606  ''' Enrich the schedule with a user defined filter sequence '''
1607  ## load the relevant part
1608  filterConfig=self.load(sequence.split('.')[0])
1609  filterSeq=sequence.split('.')[-1]
1610  ## print it in the configuration
1611  class PrintAllModules(object):
1612  def __init__(self):
1613  self.inliner=''
1614  pass
1615  def enter(self,visitee):
1616  try:
1617  label=visitee.label()
1618  ##needs to be in reverse order
1619  self.inliner=label+','+self.inliner
1620  except:
1621  pass
1622  def leave(self,v): pass
1623 
1624  expander=PrintAllModules()
1625  getattr(self.process,filterSeq).visit( expander )
1626  self._options.inlineObjets+=','+expander.inliner
1627  self._options.inlineObjets+=','+filterSeq
1628 
1629  ## put the filtering path in the schedule
1630  self.scheduleSequence(filterSeq,'filtering_step')
1631  self.nextScheduleIsConditional=True
1632  ## put it before all the other paths
1633  self.productionFilterSequence = filterSeq
1634 
1635  return
1636 
1637  def prepare_RECO(self, sequence = "reconstruction"):
1638  ''' Enrich the schedule with reconstruction '''
1639  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1640  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1641  return
1642 
1643  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1644  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1645  if not self._options.fast:
1646  print "ERROR: this step is only implemented for FastSim"
1647  sys.exit()
1648  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1649  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1650  return
1651 
1652  def prepare_PAT(self, sequence = "miniAOD"):
1653  ''' Enrich the schedule with PAT '''
1654  self.prepare_PATFILTER(self)
1655  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1656  self.labelsToAssociate.append('patTask')
1657  if not self._options.runUnscheduled:
1658  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1659  if self._options.isData:
1660  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1661  else:
1662  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1663  if self._options.fast:
1664  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1665 
1666  if self._options.hltProcess:
1667  if len(self._options.customise_commands) > 1:
1668  self._options.customise_commands = self._options.customise_commands + " \n"
1669  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\""
1670 # self.renameHLTprocessInSequence(sequence)
1671 
1672  return
1673 
1674  def prepare_EI(self, sequence = None):
1675  ''' Enrich the schedule with event interpretation '''
1676  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1677  if sequence in EventInterpretation:
1678  self.EIDefaultCFF = EventInterpretation[sequence]
1679  sequence = 'EIsequence'
1680  else:
1681  raise Exception('Cannot set %s event interpretation'%( sequence) )
1682  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1683  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1684  return
1685 
1686  def prepare_SKIM(self, sequence = "all"):
1687  ''' Enrich the schedule with skimming fragments'''
1688  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1689  sequence = sequence.split('.')[-1]
1690 
1691  skimlist=sequence.split('+')
1692  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1693  from Configuration.Skimming.autoSkim import autoSkim
1694  self.expandMapping(skimlist,autoSkim)
1695 
1696  #print "dictionnary for skims:",skimConfig.__dict__
1697  for skim in skimConfig.__dict__:
1698  skimstream = getattr(skimConfig,skim)
1699  if isinstance(skimstream,cms.Path):
1700  #blacklist the skim path so that it does not appear in the cfg
1701  self.blacklist_paths.append(skimstream)
1702  if (not isinstance(skimstream,cms.FilteredStream)):
1703  continue
1704  shortname = skim.replace('SKIMStream','')
1705  if (sequence=="all"):
1706  self.addExtraStream(skim,skimstream)
1707  elif (shortname in skimlist):
1708  self.addExtraStream(skim,skimstream)
1709  #add a DQM eventcontent for this guy
1710  if self._options.datatier=='DQM':
1711  self.process.load(self.EVTCONTDefaultCFF)
1712  skimstreamDQM = cms.FilteredStream(
1713  responsible = skimstream.responsible,
1714  name = skimstream.name+'DQM',
1715  paths = skimstream.paths,
1716  selectEvents = skimstream.selectEvents,
1717  content = self._options.datatier+'EventContent',
1718  dataTier = cms.untracked.string(self._options.datatier)
1719  )
1720  self.addExtraStream(skim+'DQM',skimstreamDQM)
1721  for i in range(skimlist.count(shortname)):
1722  skimlist.remove(shortname)
1723 
1724 
1725 
1726  if (len(skimlist)!=0 and sequence!="all"):
1727  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1728  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
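# Illustrative sketch, not part of the original file: a step such as 'SKIM:@Mu+DiJet'
# is expanded through Configuration.Skimming.autoSkim before the loop above, roughly
#   skimlist = ['@Mu', 'DiJet']
#   self.expandMapping(skimlist, autoSkim)   # '@Mu' replaced by its mapped skim names
# and each surviving short name is matched against the SKIMStream objects of the config.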
1729 
1730  def prepare_USER(self, sequence = None):
1731  ''' Enrich the schedule with a user defined sequence '''
1732  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1733  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1734  return
1735 
1736  def prepare_POSTRECO(self, sequence = None):
1737  """ Enrich the schedule with the postreco step """
1738  self.loadAndRemember(self.POSTRECODefaultCFF)
1739  self.scheduleSequence('postreco_generator','postreco_step')
1740  return
1741 
1742 
1743  def prepare_VALIDATION(self, sequence = 'validation'):
1744  print sequence,"in preparing validation"
1745  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1746  from Validation.Configuration.autoValidation import autoValidation
1747  #in case VALIDATION:something:somethingelse -> something,somethingelse
1748  sequence=sequence.split('.')[-1]
1749  if sequence.find(',')!=-1:
1750  prevalSeqName=sequence.split(',')[0].split('+')
1751  valSeqName=sequence.split(',')[1].split('+')
1752  self.expandMapping(prevalSeqName,autoValidation,index=0)
1753  self.expandMapping(valSeqName,autoValidation,index=1)
1754  else:
1755  if '@' in sequence:
1756  prevalSeqName=sequence.split('+')
1757  valSeqName=sequence.split('+')
1758  self.expandMapping(prevalSeqName,autoValidation,index=0)
1759  self.expandMapping(valSeqName,autoValidation,index=1)
1760  else:
1761  postfix=''
1762  if sequence:
1763  postfix='_'+sequence
1764  prevalSeqName=['prevalidation'+postfix]
1765  valSeqName=['validation'+postfix]
1766  if not hasattr(self.process,valSeqName[0]):
1767  prevalSeqName=['']
1768  valSeqName=[sequence]
1769 
1770  def NFI(index):
1771  ##name from index, required to keep backward compatibility
1772  if index==0:
1773  return ''
1774  else:
1775  return '%s'%index
1776 
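# Illustrative sketch, not part of the original file: NFI keeps the historical step names,
# e.g. NFI(0) -> '' gives 'validation_step' while NFI(1) -> '1' gives 'validation_step1'.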
1777 
1778  #rename the HLT process in validation steps
1779  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1780  for s in valSeqName+prevalSeqName:
1781  if s:
1783  for (i,s) in enumerate(prevalSeqName):
1784  if s:
1785  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1786  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1787 
1788  for (i,s) in enumerate(valSeqName):
1789  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1790  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1791 
1792  #needed in case the miniAODValidation sequence is run starting from AODSIM
1793  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1794  return
1795 
1796  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1797  if self._options.restoreRNDSeeds==False:
1798  self._options.restoreRNDSeeds=True
1799 
1800  if not 'DIGI' in self.stepMap and not self._options.fast:
1801  self.executeAndRemember("process.mix.playback = True")
1802  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1803  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1804  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1805 
1806  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1807  #will get in the schedule, smoothly
1808  for (i,s) in enumerate(valSeqName):
1809  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1810 
1811  return
1812 
1813 
1815  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1816  It will climb down within PSets, VPSets and VInputTags to find its target"""
1817  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1818  self._paramReplace = paramReplace
1819  self._paramSearch = paramSearch
1820  self._verbose = verbose
1821  self._whitelist = whitelist
1822 
1823  def doIt(self,pset,base):
1824  if isinstance(pset, cms._Parameterizable):
1825  for name in pset.parameters_().keys():
1826  # skip whitelisted parameters
1827  if name in self._whitelist:
1828  continue
1829  # if I use pset.parameters_().items() I get copies of the parameter values
1830  # so I can't modify the nested pset
1831  value = getattr(pset,name)
1832  type = value.pythonTypeName()
1833  if type in ('cms.PSet', 'cms.untracked.PSet'):
1834  self.doIt(value,base+"."+name)
1835  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1836  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1837  elif type in ('cms.string', 'cms.untracked.string'):
1838  if value.value() == self._paramSearch:
1839  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1840  setattr(pset, name,self._paramReplace)
1841  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1842  for (i,n) in enumerate(value):
1843  if not isinstance(n, cms.InputTag):
1844  n=cms.InputTag(n)
1845  if n.processName == self._paramSearch:
1846  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1847  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1848  setattr(n,"processName",self._paramReplace)
1849  value[i]=n
1850  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1851  for (i,n) in enumerate(value):
1852  if n==self._paramSearch:
1853  getattr(pset,name)[i]=self._paramReplace
1854  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1855  if value.processName == self._paramSearch:
1856  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1857  setattr(getattr(pset, name),"processName",self._paramReplace)
1858 
1859  def enter(self,visitee):
1860  label = ''
1861  try:
1862  label = visitee.label()
1863  except AttributeError:
1864  label = '<Module not in a Process>'
1865  except:
1866  label = 'other exception'
1867  self.doIt(visitee, label)
1868 
1869  def leave(self,visitee):
1870  pass
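# Illustrative sketch, not part of the original file: the visitor above is applied with the
# standard cms visit() call, mirroring its use further down, e.g.
#   visitor = ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT",
#                                                               whitelist=("subSystemFolder",))
#   getattr(process, 'DQMOffline').visit(visitor)   # 'DQMOffline' is an assumed sequence name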
1871 
1872  #visit a sequence to replace all input tags
1873  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1874  print "Replacing all InputTag %s => %s"%(oldT,newT)
1875  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1876  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1877  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1878  if not loadMe in self.additionalCommands:
1879  self.additionalCommands.append(loadMe)
1880  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1881 
1882  #change the process name used to address HLT results in any sequence
1883  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1884  if self._options.hltProcess:
1885  proc=self._options.hltProcess
1886  else:
1887  proc=self.process.name_()
1888  if proc==HLTprocess: return
1889  # look up all modules in the given sequence
1890  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1891  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1892  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1893  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1894  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1895 
1896 
1897  def expandMapping(self,seqList,mapping,index=None):
1898  maxLevel=20
1899  level=0
1900  while '@' in repr(seqList) and level<maxLevel:
1901  level+=1
1902  for specifiedCommand in seqList:
1903  if specifiedCommand.startswith('@'):
1904  location=specifiedCommand[1:]
1905  if not location in mapping:
1906  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1907  mappedTo=mapping[location]
1908  if index!=None:
1909  mappedTo=mappedTo[index]
1910  seqList.remove(specifiedCommand)
1911  seqList.extend(mappedTo.split('+'))
1912  break
1913  if level==maxLevel:
1914  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
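# Illustrative sketch, not part of the original file: expandMapping rewrites '@' entries in
# place using the given autoXYZ dictionary, e.g. with an assumed mapping
#   mapping = {'common': ['commonPreDQM', 'commonPostDQM']}
#   seqList = ['@common', 'muon']
#   self.expandMapping(seqList, mapping, index=0)
#   # seqList is now ['muon', 'commonPreDQM']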
1915 
1916  def prepare_DQM(self, sequence = 'DQMOffline'):
1917  # this one needs replacement
1918 
1919  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1920  sequenceList=sequence.split('.')[-1].split('+')
1921  postSequenceList=sequence.split('.')[-1].split('+')
1922  from DQMOffline.Configuration.autoDQM import autoDQM
1923  self.expandMapping(sequenceList,autoDQM,index=0)
1924  self.expandMapping(postSequenceList,autoDQM,index=1)
1925 
1926  if len(set(sequenceList))!=len(sequenceList):
1927  sequenceList=list(set(sequenceList))
1928  print "Duplicate entries for DQM:, using",sequenceList
1929 
1930  pathName='dqmoffline_step'
1931  for (i,sequence) in enumerate(sequenceList):
1932  if (i!=0):
1933  pathName='dqmoffline_%d_step'%(i)
1934 
1935  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1936  self.renameHLTprocessInSequence(sequence)
1937 
1938  setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1939  self.schedule.append(getattr(self.process,pathName))
1940 
1941  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1942  #will get in the schedule, smoothly
1943  getattr(self.process,pathName).insert(0,self.process.genstepfilter)
1944 
1945  pathName='dqmofflineOnPAT_step'
1946  for (i,sequence) in enumerate(postSequenceList):
1947  if (i!=0):
1948  pathName='dqmofflineOnPAT_%d_step'%(i)
1949 
1950  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1951  self.schedule.append(getattr(self.process,pathName))
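# Illustrative sketch, not part of the original file: for a step such as 'DQM:@common+muon'
# the two loops above book one EndPath per expanded sequence, with assumed resulting names
#   dqmoffline_step, dqmoffline_1_step            (from sequenceList)
#   dqmofflineOnPAT_step, dqmofflineOnPAT_1_step  (from postSequenceList)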
1952 
1953  def prepare_HARVESTING(self, sequence = None):
1954  """ Enrich the process with harvesting step """
1955  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1956  self.loadAndRemember(self.DQMSaverCFF)
1957 
1958  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1959  sequence = sequence.split('.')[-1]
1960 
1961  # decide which HARVESTING paths to use
1962  harvestingList = sequence.split("+")
1963  from DQMOffline.Configuration.autoDQM import autoDQM
1964  from Validation.Configuration.autoValidation import autoValidation
1965  import copy
1966  combined_mapping = copy.deepcopy( autoDQM )
1967  combined_mapping.update( autoValidation )
1968  self.expandMapping(harvestingList,combined_mapping,index=-1)
1969 
1970  if len(set(harvestingList))!=len(harvestingList):
1971  harvestingList=list(set(harvestingList))
1972  print "Duplicate entries for HARVESTING, using",harvestingList
1973 
1974  for name in harvestingList:
1975  if not name in harvestingConfig.__dict__:
1976  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1977  continue
1978  harvestingstream = getattr(harvestingConfig,name)
1979  if isinstance(harvestingstream,cms.Path):
1980  self.schedule.append(harvestingstream)
1981  self.blacklist_paths.append(harvestingstream)
1982  if isinstance(harvestingstream,cms.Sequence):
1983  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1984  self.schedule.append(getattr(self.process,name+"_step"))
1985 
1986  self.scheduleSequence('DQMSaver','dqmsave_step')
1987  return
1988 
1989  def prepare_ALCAHARVEST(self, sequence = None):
1990  """ Enrich the process with AlCaHarvesting step """
1991  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
1992  sequence=sequence.split(".")[-1]
1993 
1994  # decide which AlcaHARVESTING paths to use
1995  harvestingList = sequence.split("+")
1996 
1997 
1998 
1999  from Configuration.AlCa.autoPCL import autoPCL
2000  self.expandMapping(harvestingList,autoPCL)
2001 
2002  for name in harvestingConfig.__dict__:
2003  harvestingstream = getattr(harvestingConfig,name)
2004  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2005  self.schedule.append(harvestingstream)
2006  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2007  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2008  harvestingList.remove(name)
2009  # append the common part at the end of the sequence
2010  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2011  self.schedule.append(lastStep)
2012 
2013  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2014  print "The following harvesting could not be found : ", harvestingList
2015  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2016 
2017 
2018 
2019  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2020  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2021  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2022  return
2023 
2024  def finalizeFastSimHLT(self):
2025  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2026  self.schedule.append(self.process.reconstruction)
2027 
2028 
2029  def build_production_info(self, evt_type, evtnumber):
2030  """ Add useful info for the production. """
2031  self.process.configurationMetadata=cms.untracked.PSet\
2032  (version=cms.untracked.string("$Revision: 1.19 $"),
2033  name=cms.untracked.string("Applications"),
2034  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2035  )
2036 
2037  self.addedObjects.append(("Production Info","configurationMetadata"))
2038 
2039 
2040  def prepare(self, doChecking = False):
2041  """ Prepare the configuration string and add missing pieces."""
2042 
2043  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2044  self.addMaxEvents()
2045  if self.with_input:
2046  self.addSource()
2047  self.addStandardSequences()
2048  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2049  self.completeInputCommand()
2050  self.addConditions()
2051 
2052 
2053  outputModuleCfgCode=""
2054  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2055  outputModuleCfgCode=self.addOutput()
2056 
2057  self.addCommon()
2058 
2059  self.pythonCfgCode = "# Auto generated configuration file\n"
2060  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2061  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2062  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2063  if hasattr(self._options,"era") and self._options.era :
2064  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2065  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2066  # Multiple eras can be specified in a comma separated list
2067  for requestedEra in self._options.era.split(",") :
2068  self.pythonCfgCode += ",eras."+requestedEra
2069  self.pythonCfgCode += ")\n\n" # end of the line
2070  else :
2071  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2072 
2073  self.pythonCfgCode += "# import of standard configurations\n"
2074  for module in self.imports:
2075  self.pythonCfgCode += ("process.load('"+module+"')\n")
2076 
2077  # production info
2078  if not hasattr(self.process,"configurationMetadata"):
2079  self.build_production_info(self._options.evt_type, self._options.number)
2080  else:
2081  #the PSet was added via a load
2082  self.addedObjects.append(("Production Info","configurationMetadata"))
2083 
2084  self.pythonCfgCode +="\n"
2085  for comment,object in self.addedObjects:
2086  if comment!="":
2087  self.pythonCfgCode += "\n# "+comment+"\n"
2088  self.pythonCfgCode += dumpPython(self.process,object)
2089 
2090  # dump the output definition
2091  self.pythonCfgCode += "\n# Output definition\n"
2092  self.pythonCfgCode += outputModuleCfgCode
2093 
2094  # dump all additional outputs (e.g. alca or skim streams)
2095  self.pythonCfgCode += "\n# Additional output definition\n"
2096  #I do not understand why the keys are not normally ordered.
2097  nl=self.additionalOutputs.keys()
2098  nl.sort()
2099  for name in nl:
2100  output = self.additionalOutputs[name]
2101  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2102  tmpOut = cms.EndPath(output)
2103  setattr(self.process,name+'OutPath',tmpOut)
2104  self.schedule.append(tmpOut)
2105 
2106  # dump all additional commands
2107  self.pythonCfgCode += "\n# Other statements\n"
2108  for command in self.additionalCommands:
2109  self.pythonCfgCode += command + "\n"
2110 
2111  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2112  for object in self._options.inlineObjets.split(','):
2113  if not object:
2114  continue
2115  if not hasattr(self.process,object):
2116  print 'cannot inline -'+object+'- : not known'
2117  else:
2118  self.pythonCfgCode +='\n'
2119  self.pythonCfgCode +=dumpPython(self.process,object)
2120 
2121  # dump all paths
2122  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2123  for path in self.process.paths:
2124  if getattr(self.process,path) not in self.blacklist_paths:
2125  self.pythonCfgCode += dumpPython(self.process,path)
2126 
2127  for endpath in self.process.endpaths:
2128  if getattr(self.process,endpath) not in self.blacklist_paths:
2129  self.pythonCfgCode += dumpPython(self.process,endpath)
2130 
2131  # dump the schedule
2132  self.pythonCfgCode += "\n# Schedule definition\n"
2133  result = "process.schedule = cms.Schedule("
2134 
2135  # handling of the schedule
2136  self.process.schedule = cms.Schedule()
2137  for item in self.schedule:
2138  if not isinstance(item, cms.Schedule):
2139  self.process.schedule.append(item)
2140  else:
2141  self.process.schedule.extend(item)
2142 
2143  if hasattr(self.process,"HLTSchedule"):
2144  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2145  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2146  pathNames = ['process.'+p.label_() for p in beforeHLT]
2147  result += ','.join(pathNames)+')\n'
2148  result += 'process.schedule.extend(process.HLTSchedule)\n'
2149  pathNames = ['process.'+p.label_() for p in afterHLT]
2150  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2151  else:
2152  pathNames = ['process.'+p.label_() for p in self.schedule]
2153  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2154 
2155  self.pythonCfgCode += result
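# Illustrative sketch, not part of the original file: the dumped schedule statement then
# reads roughly like
#   process.schedule = cms.Schedule(process.generation_step,process.simulation_step,process.endjob_step,process.RECOSIMoutput_step)
# with 'process.schedule.extend(process.HLTSchedule)' spliced in when an HLT menu was loaded.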
2156 
2157  for labelToAssociate in self.labelsToAssociate:
2158  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2159  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2160 
2161  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2162  associatePatAlgosToolsTask(self.process)
2163  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2164  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2165 
2166  if self._options.nThreads != "1":
2167  self.pythonCfgCode +="\n"
2168  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2169  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2170  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2171  #repacked version
2172  if self._options.isRepacked:
2173  self.pythonCfgCode +="\n"
2174  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2175  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2176  MassReplaceInputTag(self.process)
2177 
2178  # special treatment in case of production filter sequence 2/2
2179  if self.productionFilterSequence:
2180  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2181  self.pythonCfgCode +='for path in process.paths:\n'
2182  if len(self.conditionalPaths):
2183  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2184  if len(self.excludedPaths):
2185  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2186  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2187  pfs = getattr(self.process,self.productionFilterSequence)
2188  for path in self.process.paths:
2189  if not path in self.conditionalPaths: continue
2190  if path in self.excludedPaths: continue
2191  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2192 
2193 
2194  # dump customise fragment
2195  self.pythonCfgCode += self.addCustomise()
2196 
2197  if self._options.runUnscheduled:
2198  # prune and delete paths
2199  #this is not supporting the blacklist at this point since I do not understand it
2200  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2201  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2202  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2203 
2204  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2205  self.process=convertToUnscheduled(self.process)
2206 
2207  self.pythonCfgCode += self.addCustomise(1)
2208 
2209  self.pythonCfgCode += self.addCustomiseCmdLine()
2210 
2211  # Temporary hack to put the early delete customization after
2212  # everything else
2213  #
2214  # FIXME: remove when no longer needed
2215  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2216  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2217  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2218  self.pythonCfgCode += "# End adding early deletion\n"
2219  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2220  self.process = customiseEarlyDelete(self.process)
2221 
2222 
2223  # make the .io file
2224 
2225  if self._options.io:
2226  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2227  if not self._options.io.endswith('.io'): self._options.io+='.io'
2228  io=open(self._options.io,'w')
2229  ioJson={}
2230  if hasattr(self.process.source,"fileNames"):
2231  if len(self.process.source.fileNames.value()):
2232  ioJson['primary']=self.process.source.fileNames.value()
2233  if hasattr(self.process.source,"secondaryFileNames"):
2234  if len(self.process.source.secondaryFileNames.value()):
2235  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2236  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2237  ioJson['pileup']=self._options.pileup_input[4:]
2238  for (o,om) in self.process.outputModules_().items():
2239  ioJson[o]=om.fileName.value()
2240  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2241  if self.productionFilterSequence:
2242  ioJson['filter']=self.productionFilterSequence
2243  import json
2244  io.write(json.dumps(ioJson))
2245  return
2246 