test
CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
class Options:
    """Plain attribute bag: option values are attached to instances as
    attributes (see ``defaultOptions`` below); no behaviour of its own."""
15 
# The canonical defaults.  ConfigBuilder reads these when the caller
# (cmsDriver) does not override a value explicitly.
defaultOptions = Options()

# -- data / MC flavour and processing steps ------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''                      # comma-separated list of steps, e.g. 'GEN,SIM'
# -- pile-up mixing -------------------------------------------------------
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# -- geometry / field / conditions ---------------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# -- event counts ---------------------------------------------------------
defaultOptions.number = -1                  # -1 means "all events"
defaultOptions.number_out = None
# -- bookkeeping ----------------------------------------------------------
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# -- input sources --------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# -- customisation hooks --------------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# -- input/output locations and formats ----------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''             # NOTE: historical misspelling, kept for compatibility
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# -- run-dependent MC -----------------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
# -- scheduling / threading -----------------------------------------------
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
81 
82 # some helper routines
def dumpPython(process,name):
    """Return python source re-creating attribute *name* of *process*.

    Paths, EndPaths and Sequences need the process context to dump
    themselves; every other object (EDModules, ESProducers, ...) dumps
    standalone and gets a trailing newline appended.
    """
    theObject = getattr(process,name)
    if isinstance(theObject, (cms.Path, cms.EndPath, cms.Sequence)):
        return "process."+name+" = " + theObject.dumpPython("process")
    # The original code special-cased _Module/ESProducer here, but that
    # branch was byte-identical to the fallback: merged into one.
    return "process."+name+" = " + theObject.dumpPython()+"\n"
91 def filesFromList(fileName,s=None):
92  import os
93  import FWCore.ParameterSet.Config as cms
94  prim=[]
95  sec=[]
96  for line in open(fileName,'r'):
97  if line.count(".root")>=2:
98  #two files solution...
99  entries=line.replace("\n","").split()
100  if not entries[0] in prim:
101  prim.append(entries[0])
102  if not entries[1] in sec:
103  sec.append(entries[1])
104  elif (line.find(".root")!=-1):
105  entry=line.replace("\n","")
106  if not entry in prim:
107  prim.append(entry)
108  if s:
109  if not hasattr(s,"fileNames"):
110  s.fileNames=cms.untracked.vstring(prim)
111  else:
112  s.fileNames.extend(prim)
113  if len(sec)!=0:
114  if not hasattr(s,"secondaryFileNames"):
115  s.secondaryFileNames=cms.untracked.vstring(sec)
116  else:
117  s.secondaryFileNames.extend(sec)
118  print "found files: ",prim
119  if len(prim)==0:
120  raise Exception("There are not files in input from the file list")
121  if len(sec)!=0:
122  print "found parent files:",sec
123  return (prim,sec)
124 
def filesFromDASQuery(query,option="",s=None):
    """Run a DAS client *query* and collect the ROOT file names it returns.

    *option* is passed verbatim to ``das_client``.  The query is retried up
    to 3 times (sleeping 100 s between attempts) until the client exits
    with status 0.  Output lines are parsed exactly like filesFromList():
    two ``.root`` names on a line mean "primary secondary", one means a
    primary file.  If *s* (a cms.Source) is given, its
    fileNames/secondaryFileNames are filled in place.
    Returns the tuple (prim, sec).
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print "the query is",query
    # eC: exit code of das_client; start non-zero so the loop runs at least once
    eC=5
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print 'Sleeping, then retrying DAS'
            time.sleep(100)
        # shell invocation: query string is built here, not user-escaped
        p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        pipe=p.stdout.read()
        # waitpid returns (pid, status); status!=0 triggers a retry
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    # parse the captured stdout of the last attempt
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
172 
def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
    """Replace every occurrence of input tag *oldT* by *newT* in all paths
    and endpaths of *aProcess*."""
    from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
    # walk paths first, then endpaths, exactly as two separate loops would
    allPathNames = list(aProcess.paths_().keys()) + list(aProcess.endpaths_().keys())
    for pathName in allPathNames:
        massSearchReplaceAnyInputTag(getattr(aProcess,pathName), oldT, newT)
179 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key of *listOfKeys* present in
    *dict*.

    The matched key is removed from *dict* so the caller can later detect
    unused --output options from the leftover keys.  If none of the keys is
    present, *opt* is returned when given (may be '' or 0), otherwise an
    Exception is raised.  NOTE: the second parameter is named ``dict`` for
    historical reasons and shadows the builtin.
    """
    for k in listOfKeys:
        if k in dict:
            # pop() both reads and removes in one lookup
            return dict.pop(k)
    if opt is not None:   # identity check instead of '!=' per PEP 8
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 
191 class ConfigBuilder(object):
192  """The main building routines """
193 
    def __init__(self, options, process = None, with_output = False, with_input = False ):
        """options taken from old cmsDriver and optparse.

        Validates the option set, parses the comma-separated step string
        into self.stepMap/self.stepKeys, and creates (or adopts) the
        cms.Process together with the bookkeeping containers used while
        building the configuration.
        """

        # full output path = output directory + file name
        options.outfile_name = options.dirout+options.fileout

        self._options = options

        if self._options.isData and options.isMC:
            raise Exception("ERROR: You may specify only --data or --mc, not both")
        #if not self._options.conditions:
        #    raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

        # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
        # NOTE: outputDefinition is a python literal supplied on the command
        # line and is eval()'d here (trusted input from the operator).
        if 'ENDJOB' in self._options.step:
            if  (hasattr(self._options,"outputDefinition") and \
                self._options.outputDefinition != '' and \
                any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
                (hasattr(self._options,"datatier") and \
                self._options.datatier and \
                'DQMIO' in self._options.datatier):
                print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
                self._options.step=self._options.step.replace(',ENDJOB','')

        # what steps are provided by this class?
        # every prepare_<NAME> method advertises step <NAME>
        stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
        self.stepMap={}
        self.stepKeys=[]
        # step syntax: NAME | NAME:seq1+seq2 | NAME:label:seq1+seq2
        # 're' prefix (e.g. reRECO) marks a re-running step and bypasses the
        # known-step check
        for step in self._options.step.split(","):
            if step=='': continue
            stepParts = step.split(":")
            stepName = stepParts[0]
            if stepName not in stepList and not stepName.startswith('re'):
                raise ValueError("Step "+stepName+" unknown")
            if len(stepParts)==1:
                self.stepMap[stepName]=""
            elif len(stepParts)==2:
                self.stepMap[stepName]=stepParts[1].split('+')
            elif len(stepParts)==3:
                self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
            else:
                raise ValueError("Step definition "+step+" invalid")
            self.stepKeys.append(stepName)

        #print "map of steps is:",self.stepMap

        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        # adopt an externally supplied process, or create a fresh one
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.imports = []          # cff files loaded into the process (for the dump)
        self.importsUnsch = []     # cff loads deferred to unscheduled mode
        self.define_Configs()
        self.schedule = list()

        # we are doing three things here:
        # creating a process to catch errors
        # building the code to re-create the process

        self.additionalCommands = []
        # TODO: maybe a list of to be dumped objects would help as well
        self.blacklist_paths = []
        self.addedObjects = []        # (comment, attribute-name) pairs for the dump
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
        self.excludedPaths=[]
268 
269  def profileOptions(self):
270  """
271  addIgProfService
272  Function to add the igprof profile service so that you can dump in the middle
273  of the run.
274  """
275  profileOpts = self._options.profile.split(':')
276  profilerStart = 1
277  profilerInterval = 100
278  profilerFormat = None
279  profilerJobFormat = None
280 
281  if len(profileOpts):
282  #type, given as first argument is unused here
283  profileOpts.pop(0)
284  if len(profileOpts):
285  startEvent = profileOpts.pop(0)
286  if not startEvent.isdigit():
287  raise Exception("%s is not a number" % startEvent)
288  profilerStart = int(startEvent)
289  if len(profileOpts):
290  eventInterval = profileOpts.pop(0)
291  if not eventInterval.isdigit():
292  raise Exception("%s is not a number" % eventInterval)
293  profilerInterval = int(eventInterval)
294  if len(profileOpts):
295  profilerFormat = profileOpts.pop(0)
296 
297 
298  if not profilerFormat:
299  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
300  self._options.step,
301  self._options.pileup,
302  self._options.conditions,
303  self._options.datatier,
304  self._options.profileTypeLabel)
305  if not profilerJobFormat and profilerFormat.endswith(".gz"):
306  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
307  elif not profilerJobFormat:
308  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
309 
310  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
311 
312  def load(self,includeFile):
313  includeFile = includeFile.replace('/','.')
314  self.process.load(includeFile)
315  return sys.modules[includeFile]
316 
317  def loadAndRemember(self, includeFile,unsch=0):
318  """helper routine to load am memorize imports"""
319  # we could make the imports a on-the-fly data method of the process instance itself
320  # not sure if the latter is a good idea
321  includeFile = includeFile.replace('/','.')
322  if unsch==0:
323  self.imports.append(includeFile)
324  self.process.load(includeFile)
325  return sys.modules[includeFile]
326  else:
327  self.importsUnsch.append(includeFile)
328  return 0#sys.modules[includeFile]
329 
    def executeAndRemember(self, command):
        """helper routine to remember replace statements.

        *command* is a python statement written against the dumped config's
        top-level name ``process`` (e.g. "process.foo = 1").  It is recorded
        verbatim for the dump and, unless it is a comment, also executed now
        against self.process.
        """
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
            # the regex rewrites only the standalone identifier 'process'
            # (bounded by non-identifier characters or start of string)
            import re
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
            #exec(command.replace("process.","self.process."))
338 
339  def addCommon(self):
340  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
341  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
342  else:
343  self.process.options = cms.untracked.PSet( )
344 
345  if self._options.runUnscheduled:
346  self.process.options.allowUnscheduled=cms.untracked.bool(True)
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many evts will be processed"""
373  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
374  if self._options.number_out:
375  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
376  self.addedObjects.append(("","maxEvents"))
377 
    def addSource(self):
        """Here the source is built. Priority: file, generator.

        Creates self.process.source according to --filetype (EDM/DAT/LHE/
        DQM/DQMDAQ) and fills it from --filein / --dasquery.  Also applies
        input commands, a lumi mask, and the run-dependent-MC source
        modifications.
        """
        self.addedObjects.append(("Input source","source"))

        # local helper (note: a plain function taking the builder instance
        # explicitly, called below as filesFromOption(self))
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print "entry",entry
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print "entry",entry
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    # syntax: lhe:<article>[:<skipEvents>]
                    args=self._options.filein.split(':')
                    article=args[1]
                    print 'LHE input from article ',article
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)


            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)

            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")


            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # an explicit --dasquery replaces whatever source was built above
        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

        ##drop LHEXMLStringProduct on input to save memory if appropriate
        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        # generator-type jobs without input files get an EmptySource
        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                # NOTE: eval of an operator-supplied python literal
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    # scenario maps to a module name: import it and take its distribution
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
            # record the same manipulation for the dumped configuration
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        return
495 
    def addOutput(self):
        """ Add output module to the process.

        Two modes: if --output was given it is eval()'d as a list of dicts
        (one output module per dict); otherwise one module per
        (--eventcontent, --datatier) pair is created.  Each module gets an
        EndPath appended to self.schedule.  Returns the python source of the
        created modules for the dump.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print "--datatier & --eventcontent options ignored"

            #new output convention with a list of dict
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                # anyOf pops the matched key, so leftover keys flag a typo below
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
                ## event content
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role
                # try increasingly specific auto-generated labels until a free one is found
                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
                # first output keeps the requested file name; later ones get a tier suffix
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # everything should have been popped by now; leftovers are typos
                if len(outDefDict.keys()):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                             dataTier = cms.untracked.string(theTier),
                                             filterName = cms.untracked.string(theFilterName))
                                          )
                # default SelectEvents when none requested: prefer
                # filtering_step over generation_step (second if wins)
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if addAlCaSelects:
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                # patch dumpPython so the dump references the named event
                # content instead of inlining the full command list
                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
                        return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()

            ##ends the --output options model
            return result

        # legacy mode: one module per (eventcontent, datatier) pair
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                                                   )
                                      )
            # default SelectEvents: filtering_step wins over generation_step
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                output.dropMetaData = cms.untracked.string('ALL')
                output.fastCloning= cms.untracked.bool(False)
                output.overrideInputFileSplitLevels = cms.untracked.bool(True)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            # see the same dumpPython patch in the --output branch above
            if not self._options.inlineEventContent:
                def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
667 
669  """
670  Add selected standard sequences to the process
671  """
672  # load the pile up file
673  if self._options.pileup:
674  pileupSpec=self._options.pileup.split(',')[0]
675 
676  # Does the requested pile-up scenario exist?
677  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
678  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
679  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
680  raise Exception(message)
681 
682  # Put mixing parameters in a dictionary
683  if '.' in pileupSpec:
684  mixingDict={'file':pileupSpec}
685  elif pileupSpec.startswith('file:'):
686  mixingDict={'file':pileupSpec[5:]}
687  else:
688  import copy
689  mixingDict=copy.copy(Mixing[pileupSpec])
690  if len(self._options.pileup.split(','))>1:
691  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
692 
693  # Load the pu cfg file corresponding to the requested pu scenario
694  if 'file:' in pileupSpec:
695  #the file is local
696  self.process.load(mixingDict['file'])
697  print "inlining mixing module configuration"
698  self._options.inlineObjets+=',mix'
699  else:
700  self.loadAndRemember(mixingDict['file'])
701 
702  mixingDict.pop('file')
703  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
704  if self._options.pileup_input:
705  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
706  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
707  elif self._options.pileup_input.startswith("filelist:"):
708  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
709  else:
710  mixingDict['F']=self._options.pileup_input.split(',')
711  specialization=defineMixing(mixingDict)
712  for command in specialization:
713  self.executeAndRemember(command)
714  if len(mixingDict)!=0:
715  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
716 
717 
718  # load the geometry file
719  try:
720  if len(self.stepMap):
721  self.loadAndRemember(self.GeometryCFF)
722  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
723  self.loadAndRemember(self.SimGeometryCFF)
724  if self.geometryDBLabel:
725  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
726  except ImportError:
727  print "Geometry option",self._options.geometry,"unknown."
728  raise
729 
730  if len(self.stepMap):
731  self.loadAndRemember(self.magFieldCFF)
732 
733  for stepName in self.stepKeys:
734  stepSpec = self.stepMap[stepName]
735  print "Step:", stepName,"Spec:",stepSpec
736  if stepName.startswith('re'):
737  ##add the corresponding input content
738  if stepName[2:] not in self._options.donotDropOnInput:
739  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
740  stepName=stepName[2:]
741  if stepSpec=="":
742  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
743  elif type(stepSpec)==list:
744  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
745  elif type(stepSpec)==tuple:
746  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
747  else:
748  raise ValueError("Invalid step definition")
749 
750  if self._options.restoreRNDSeeds!=False:
751  #it is either True, or a process name
752  if self._options.restoreRNDSeeds==True:
753  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
754  else:
755  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
756  if self._options.inputEventContent or self._options.inputCommands:
757  if self._options.inputCommands:
758  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
759  else:
760  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
761 
762 
764  if self._options.inputEventContent:
765  import copy
766  def dropSecondDropStar(iec):
767  #drop occurence of 'drop *' in the list
768  count=0
769  for item in iec:
770  if item=='drop *':
771  if count!=0:
772  iec.remove(item)
773  count+=1
774 
775 
776  ## allow comma separated input eventcontent
777  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
778  for evct in self._options.inputEventContent.split(','):
779  if evct=='': continue
780  theEventContent = getattr(self.process, evct+"EventContent")
781  if hasattr(theEventContent,'outputCommands'):
782  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
783  if hasattr(theEventContent,'inputCommands'):
784  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
785 
786  dropSecondDropStar(self.process.source.inputCommands)
787 
788  if not self._options.dropDescendant:
789  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
790 
791 
792  return
793 
794  def addConditions(self):
795  """Add conditions to the process"""
796  if not self._options.conditions: return
797 
798  if 'FrontierConditions_GlobalTag' in self._options.conditions:
799  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
800  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
801 
802  self.loadAndRemember(self.ConditionsDefaultCFF)
803  from Configuration.AlCa.GlobalTag import GlobalTag
804  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
805  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
806  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
807 
808 
809  def addCustomise(self,unsch=0):
810  """Include the customise code """
811 
812  custOpt=[]
813  if unsch==0:
814  for c in self._options.customisation_file:
815  custOpt.extend(c.split(","))
816  else:
817  for c in self._options.customisation_file_unsch:
818  custOpt.extend(c.split(","))
819 
821  for opt in custOpt:
822  if opt=='': continue
823  if opt.count('.')>1:
824  raise Exception("more than . in the specification:"+opt)
825  fileName=opt.split('.')[0]
826  if opt.count('.')==0: rest='customise'
827  else:
828  rest=opt.split('.')[1]
829  if rest=='py': rest='customise' #catch the case of --customise file.py
830 
831  if fileName in custMap:
832  custMap[fileName].extend(rest.split('+'))
833  else:
834  custMap[fileName]=rest.split('+')
835 
836  if len(custMap)==0:
837  final_snippet='\n'
838  else:
839  final_snippet='\n# customisation of the process.\n'
840 
841  allFcn=[]
842  for opt in custMap:
843  allFcn.extend(custMap[opt])
844  for fcn in allFcn:
845  if allFcn.count(fcn)!=1:
846  raise Exception("cannot specify twice "+fcn+" as a customisation method")
847 
848  for f in custMap:
849  # let python search for that package and do syntax checking at the same time
850  packageName = f.replace(".py","").replace("/",".")
851  __import__(packageName)
852  package = sys.modules[packageName]
853 
854  # now ask the package for its definition and pick .py instead of .pyc
855  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
856 
857  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
858  if self._options.inline_custom:
859  for line in file(customiseFile,'r'):
860  if "import FWCore.ParameterSet.Config" in line:
861  continue
862  final_snippet += line
863  else:
864  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
865  for fcn in custMap[f]:
866  print "customising the process with",fcn,"from",f
867  if not hasattr(package,fcn):
868  #bound to fail at run time
869  raise Exception("config "+f+" has no function "+fcn)
870  #execute the command
871  self.process=getattr(package,fcn)(self.process)
872  #and print it in the configuration
873  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
874  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
875 
876  if len(custMap)!=0:
877  final_snippet += '\n# End of customisation functions\n'
878 
879  ### now for a useful command
880  return final_snippet
881 
883  final_snippet='\n# Customisation from command line\n'
884  if self._options.customise_commands:
885  import string
886  for com in self._options.customise_commands.split('\\n'):
887  com=string.lstrip(com)
888  self.executeAndRemember(com)
889  final_snippet +='\n'+com
890 
891  return final_snippet
892 
893  #----------------------------------------------------------------------------
894  # here the methods to define the python includes for each step or
895  # conditions
896  #----------------------------------------------------------------------------
    def define_Configs(self):
        """Pick the default cff file and default sequence for every known step.

        Fills the self.<STEP>DefaultCFF / self.<STEP>DefaultSeq attributes used
        later by the prepare_<STEP> methods, then adjusts them for the chosen
        scenario (cosmics, HeavyIons, nocoll), data vs MC, fastsim, geometry,
        magnetic field and pileup options.
        """
        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

        # baseline cff files, one per workflow step
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
        self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
        self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
        self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
        self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
        self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
        self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        # data mixing replaces the digitisation/digi2raw/L1 emulation cffs
        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
            else:
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
            self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"

        # baseline sequence names, one per step (None = whole cff content)
        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        # full reconstruction only when RAW2DIGI runs in the same job (or fastsim)
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'

        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # if its MC then change the raw2digi
        if self._options.isMC==True:
            self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        #patch for gen, due to backward incompatibility
        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        # scenario-specific overrides
        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        # the magnetic field
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None
        simGeometry=''
        if self._options.fast:
            # NOTE(review): crashes if --conditions is None here; presumably
            # --fast always comes with --conditions — confirm upstream validation
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            # resolve a geometry shorthand through GeometryConf, if it is one
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                #may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
                print "with DB:"
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # synchronize the geometry configuration and the FullSimulation sequence to be used
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cff files and sequences
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        # Mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey


        #not driven by a default cff anymore
        if self._options.isData:
            self._options.pileup=None


        self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1112 
1113  # for alca, skims, etc
1114  def addExtraStream(self,name,stream,workflow='full'):
1115  # define output module and go from there
1116  output = cms.OutputModule("PoolOutputModule")
1117  if stream.selectEvents.parameters_().__len__()!=0:
1118  output.SelectEvents = stream.selectEvents
1119  else:
1120  output.SelectEvents = cms.untracked.PSet()
1121  output.SelectEvents.SelectEvents=cms.vstring()
1122  if isinstance(stream.paths,tuple):
1123  for path in stream.paths:
1124  output.SelectEvents.SelectEvents.append(path.label())
1125  else:
1126  output.SelectEvents.SelectEvents.append(stream.paths.label())
1127 
1128 
1129 
1130  if isinstance(stream.content,str):
1131  evtPset=getattr(self.process,stream.content)
1132  for p in evtPset.parameters_():
1133  setattr(output,p,getattr(evtPset,p))
1134  if not self._options.inlineEventContent:
1135  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1136  return label
1137  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1138  else:
1139  output.outputCommands = stream.content
1140 
1141 
1142  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1143 
1144  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1145  filterName = cms.untracked.string(stream.name))
1146 
1147  if self._options.filtername:
1148  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1149 
1150  #add an automatic flushing to limit memory consumption
1151  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1152 
1153  if workflow in ("producers,full"):
1154  if isinstance(stream.paths,tuple):
1155  for path in stream.paths:
1156  self.schedule.append(path)
1157  else:
1158  self.schedule.append(stream.paths)
1159 
1160 
1161  # in case of relvals we don't want to have additional outputs
1162  if (not self._options.relval) and workflow in ("full","output"):
1163  self.additionalOutputs[name] = output
1164  setattr(self.process,name,output)
1165 
1166  if workflow == 'output':
1167  # adjust the select events to the proper trigger results from previous process
1168  filterList = output.SelectEvents.SelectEvents
1169  for i, filter in enumerate(filterList):
1170  filterList[i] = filter+":"+self._options.triggerResultsProcess
1171 
1172  return output
1173 
1174  #----------------------------------------------------------------------------
1175  # here the methods to create the steps. Of course we are doing magic here ;)
1176  # prepare_STEPNAME modifies self.process and what else's needed.
1177  #----------------------------------------------------------------------------
1178 
1179  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1180  if ( len(sequence.split('.'))==1 ):
1181  l=self.loadAndRemember(defaultCFF,unsch)
1182  elif ( len(sequence.split('.'))==2 ):
1183  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1184  sequence=sequence.split('.')[1]
1185  else:
1186  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1187  print sequence,"not recognized"
1188  raise
1189  return l
1190 
    def scheduleSequence(self,seq,prefix,what='Path'):
        """Wrap the sequence(s) named in *seq* into cms.Path (or *what*) objects and append them to the schedule.

        *seq* syntax: 'a*b' builds ONE path named *prefix* containing a then b;
        'a+b' builds one path per sequence, named *prefix*0, *prefix*1, ...;
        a plain name builds a single path named *prefix*.
        """
        if '*' in seq:
            #create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    # extend the already-created path with the next sequence
                    p=getattr(self.process,prefix)
                    p+=getattr(self.process, s)
            self.schedule.append(getattr(self.process,prefix))
            return
        else:
            #create as many path as many sequences
            if not '+' in seq:
                # single sequence: optionally mark the path as conditional
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            else:
                for i,s in enumerate(seq.split('+')):
                    sn=prefix+'%d'%(i)
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
        return
1215 
1216  def scheduleSequenceAtEnd(self,seq,prefix):
1217  self.scheduleSequence(seq,prefix,what='EndPath')
1218  return
1219 
1220  def prepare_ALCAPRODUCER(self, sequence = None):
1221  self.prepare_ALCA(sequence, workflow = "producers")
1222 
1223  def prepare_ALCAOUTPUT(self, sequence = None):
1224  self.prepare_ALCA(sequence, workflow = "output")
1225 
    def prepare_ALCA(self, sequence = None, workflow = 'full'):
        """ Enrich the process with alca streams """
        alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        maxLevel=0  # NOTE(review): never used below
        from Configuration.AlCa.autoAlca import autoAlca
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        # scan every object of the alca cff: FilteredStreams become outputs,
        # plain Paths are blacklisted from the dumped configuration
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    # keep the DQM MEtoEDM product in the stream output as well
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # drop every occurrence of this stream from the request list
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)


        # anything left in alcaList was requested but does not exist: fail loudly
        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print "The following alcas could not be found "+str(alcaList)
            print "available ",available
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1283 
1284  def prepare_LHE(self, sequence = None):
1285  #load the fragment
1286  ##make it loadable
1287  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1288  print "Loading lhe fragment from",loadFragment
1289  __import__(loadFragment)
1290  self.process.load(loadFragment)
1291  ##inline the modules
1292  self._options.inlineObjets+=','+sequence
1293 
1294  getattr(self.process,sequence).nEvents = int(self._options.number)
1295 
1296  #schedule it
1297  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1298  self.excludedPaths.append("lhe_step")
1299  self.schedule.append( self.process.lhe_step )
1300 
    def prepare_GEN(self, sequence = None):
        """ load the fragment of generator configuration """
        loadFailure=False
        #remove trailing .py
        #support old style .cfi by changing into something.cfi into something_cfi
        #remove python/ from the name
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
        #standard location of fragments
        if not '/' in loadFragment:
            loadFragment='Configuration.Generator.'+loadFragment
        else:
            loadFragment=loadFragment.replace('/','.')
        try:
            print "Loading generator fragment from",loadFragment
            __import__(loadFragment)
        except:
            # NOTE(review): deliberately broad — ANY import failure is treated
            # as "no fragment", which is only acceptable when input files exist
            loadFailure=True
            #if self.process.source and self.process.source.type_()=='EmptySource':
            if not (self._options.filein or self._options.dasquery):
                raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

        if not loadFailure:
            generatorModule=sys.modules[loadFragment]
            genModules=generatorModule.__dict__
            #remove lhe producer module since this should have been
            #imported instead in the LHE step
            if self.LHEDefaultSeq in genModules:
                del genModules[self.LHEDefaultSeq]

            if self._options.hideGen:
                self.loadAndRemember(loadFragment)
            else:
                self.process.load(loadFragment)
                # expose the objects from that fragment to the configuration
                import FWCore.ParameterSet.Modules as cmstypes
                for name in genModules:
                    theObject = getattr(generatorModule,name)
                    if isinstance(theObject, cmstypes._Module):
                        self._options.inlineObjets=name+','+self._options.inlineObjets
                    elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                        self._options.inlineObjets+=','+name

            # remember which module/sequence acts as the production filter
            if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
                if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                    self.productionFilterSequence = 'ProductionFilterSequence'
                elif 'generator' in genModules:
                    self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
        genSeqName=sequence.split('.')[-1]

        if True:
            try:
                from Configuration.StandardSequences.VtxSmeared import VtxSmeared
                cffToBeLoaded=VtxSmeared[self._options.beamspot]
                self.loadAndRemember(cffToBeLoaded)
            except ImportError:
                raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

            if self._options.scenario == 'HeavyIons':
                if self._options.pileup=='HiMixGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
                else:
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
        self.schedule.append(self.process.generation_step)

        #register to the genstepfilter the name of the path (static right now, but might evolve)
        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:
            #stop here
            return

        """ Enrich the schedule with the summary of the filter step """
        #the gen filter in the endpath
        self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
        self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
        return
1382 
1383  def prepare_SIM(self, sequence = None):
1384  """ Enrich the schedule with the simulation step"""
1385  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1386  if not self._options.fast:
1387  if self._options.gflash==True:
1388  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1389 
1390  if self._options.magField=='0T':
1391  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1392  else:
1393  if self._options.magField=='0T':
1394  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1395 
1396  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1397  return
1398 
1399  def prepare_DIGI(self, sequence = None):
1400  """ Enrich the schedule with the digitisation step"""
1401  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1402 
1403  if self._options.gflash==True:
1404  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1405 
1406  if sequence == 'pdigi_valid':
1407  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1408 
1409  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1410  if self._options.inputEventContent=='':
1411  self._options.inputEventContent='REGEN'
1412  else:
1413  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1414 
1415 
1416  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1417  return
1418 
1419  def prepare_DIGIPREMIX(self, sequence = None):
1420  """ Enrich the schedule with the digitisation step"""
1421  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1422 
1423  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1424 
1425  if sequence == 'pdigi_valid':
1426  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1427  else:
1428  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1429 
1430  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1431  return
1432 
1433  def prepare_DIGIPREMIX_S2(self, sequence = None):
1434  """ Enrich the schedule with the digitisation step"""
1435  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1436 
1437  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1438 
1439 
1440  if sequence == 'pdigi_valid':
1441  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1442  else:
1443  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1444 
1445  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1446  return
1447 
1448  def prepare_CFWRITER(self, sequence = None):
1449  """ Enrich the schedule with the crossing frame writer step"""
1450  self.loadAndRemember(self.CFWRITERDefaultCFF)
1451  self.scheduleSequence('pcfw','cfwriter_step')
1452  return
1453 
1454  def prepare_DATAMIX(self, sequence = None):
1455  """ Enrich the schedule with the digitisation step"""
1456  self.loadAndRemember(self.DATAMIXDefaultCFF)
1457  self.scheduleSequence('pdatamix','datamixing_step')
1458 
1459  if self._options.pileup_input:
1460  theFiles=''
1461  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1462  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1463  elif self._options.pileup_input.startswith("filelist:"):
1464  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1465  else:
1466  theFiles=self._options.pileup_input.split(',')
1467  #print theFiles
1468  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1469 
1470  return
1471 
1472  def prepare_DIGI2RAW(self, sequence = None):
1473  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1474  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1475  if "DIGIPREMIX" in self.stepMap.keys():
1476  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1477  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1478 
1479  return
1480 
1481  def prepare_REPACK(self, sequence = None):
1482  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1483  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1484  return
1485 
1486  def prepare_L1(self, sequence = None):
1487  """ Enrich the schedule with the L1 simulation step"""
1488  assert(sequence == None)
1489  self.loadAndRemember(self.L1EMDefaultCFF)
1490  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1491  return
1492 
1493  def prepare_L1REPACK(self, sequence = None):
1494  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1495  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1496  if sequence in supported:
1497  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1498  if self._options.scenario == 'HeavyIons':
1499  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1500  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1501  else:
1502  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1503  raise Exception('unsupported feature')
1504 
1505 
    def prepare_HLT(self, sequence = None):
        """Enrich the schedule with the HLT simulation step.

        `sequence` selects the HLT menu: '@key' is resolved via autoHLT, a
        comma form is handed to process.loadHltConfiguration, and a plain
        name loads the frozen HLT_<name>_cff configuration.
        """
        if not sequence:
            print "no specification of the hlt menu has been given, should never happen"
            raise Exception('no HLT sequence provided')

        if '@' in sequence:
            # case where HLT:@something was provided
            from Configuration.HLT.autoHLT import autoHLT
            key = sequence[1:]
            if key in autoHLT:
                sequence = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in sequence:
            #case where HLT:something:something was provided
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the options dict as 'key=value' arguments for loadHltConfiguration
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
            if sequence == 'run,fromSource':
                # take the run number from whichever attribute the source provides
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception('Cannot replace menu to load %s'%(sequence))
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the frozen HLT_<menu>_cff configuration
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)

        if self._options.isMC:
            # running the (data) HLT menu on MC requires a customisation
            if self._options.fast:
                self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
            else:
                self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")

        if self._options.name != 'HLT':
            # the process is not called HLT: rename it, both in the dumped
            # config (additionalCommands) and on the live process
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        self.schedule.append(self.process.HLTSchedule)
        # HLT paths are carried by HLTSchedule: keep them out of the Path dump
        [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]

        #this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1562 
1563 
1564  def prepare_RAW2RECO(self, sequence = None):
1565  if ','in sequence:
1566  seqReco=sequence.split(',')[1]
1567  seqDigi=sequence.split(',')[0]
1568  else:
1569  print "RAW2RECO requires two specifications",sequence,"insufficient"
1570 
1571  self.prepare_RAW2DIGI(seqDigi)
1572  self.prepare_RECO(seqReco)
1573  return
1574 
1575  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1576  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1577  self.scheduleSequence(sequence,'raw2digi_step')
1578  # if self._options.isRepacked:
1579  #self.renameInputTagsInSequence(sequence)
1580  return
1581 
1582  def prepare_PATFILTER(self, sequence=None):
1583  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1584  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1585  for filt in allMetFilterPaths:
1586  self.schedule.append(getattr(self.process,'Flag_'+filt))
1587 
1588  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1589  ''' Enrich the schedule with L1 HW validation '''
1590  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1591  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1592  print '\n\n\n DEPRECATED this has no action \n\n\n'
1593  return
1594 
1595  def prepare_L1Reco(self, sequence = "L1Reco"):
1596  ''' Enrich the schedule with L1 reconstruction '''
1597  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1598  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1599  return
1600 
1601  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1602  ''' Enrich the schedule with L1 reconstruction '''
1604  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1605  return
1606 
1607  def prepare_FILTER(self, sequence = None):
1608  ''' Enrich the schedule with a user defined filter sequence '''
1609  ## load the relevant part
1610  filterConfig=self.load(sequence.split('.')[0])
1611  filterSeq=sequence.split('.')[-1]
1612  ## print it in the configuration
1613  class PrintAllModules(object):
1614  def __init__(self):
1615  self.inliner=''
1616  pass
1617  def enter(self,visitee):
1618  try:
1619  label=visitee.label()
1620  ##needs to be in reverse order
1621  self.inliner=label+','+self.inliner
1622  except:
1623  pass
1624  def leave(self,v): pass
1625 
1626  expander=PrintAllModules()
1627  getattr(self.process,filterSeq).visit( expander )
1628  self._options.inlineObjets+=','+expander.inliner
1629  self._options.inlineObjets+=','+filterSeq
1630 
1631  ## put the filtering path in the schedule
1632  self.scheduleSequence(filterSeq,'filtering_step')
1633  self.nextScheduleIsConditional=True
1634  ## put it before all the other paths
1635  self.productionFilterSequence = filterSeq
1636 
1637  return
1638 
1639  def prepare_RECO(self, sequence = "reconstruction"):
1640  ''' Enrich the schedule with reconstruction '''
1641  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1642  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1643  return
1644 
1645  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1646  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1647  if not self._options.fast:
1648  print "ERROR: this step is only implemented for FastSim"
1649  sys.exit()
1650  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1651  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1652  return
1653 
    def prepare_PAT(self, sequence = "miniAOD"):
        '''Enrich the schedule with PAT (miniAOD production, unscheduled only).'''
        # note: passes self as the (unused) sequence argument of prepare_PATFILTER
        self.prepare_PATFILTER(self)
        self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        # pick the data/MC flavour of the miniAOD customisation (applied at the
        # unscheduled stage)
        if self._options.isData:
            self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
        if self._options.fast:
            self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")

        if self._options.hltProcess:
            # make patTrigger look at trigger results under the actual HLT process name
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\""
#        self.renameHLTprocessInSequence(sequence)

        return
1674 
1675  def prepare_EI(self, sequence = None):
1676  ''' Enrich the schedule with event interpretation '''
1677  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1678  if sequence in EventInterpretation:
1679  self.EIDefaultCFF = EventInterpretation[sequence]
1680  sequence = 'EIsequence'
1681  else:
1682  raise Exception('Cannot set %s event interpretation'%( sequence) )
1683  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1684  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1685  return
1686 
    def prepare_SKIM(self, sequence = "all"):
        '''Enrich the schedule with skimming fragments.

        `sequence` is a '+'-separated list of skim short names (or "all");
        '@key' entries are expanded via autoSkim.  Each matching
        cms.FilteredStream of the skim config becomes an extra output stream.
        '''
        skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
        sequence = sequence.split('.')[-1]

        skimlist=sequence.split('+')
        ## support @Mu+DiJet+@Electron configuration via autoSkim.py
        from Configuration.Skimming.autoSkim import autoSkim
        self.expandMapping(skimlist,autoSkim)

        #print "dictionnary for skims:",skimConfig.__dict__
        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):
                # only FilteredStream objects define actual skim output streams
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                #add a DQM eventcontent for this guy
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                # a skim may be requested several times: consume every occurrence
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)



        # anything left in skimlist was never matched: treat as a typo
        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1730 
1731  def prepare_USER(self, sequence = None):
1732  ''' Enrich the schedule with a user defined sequence '''
1733  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1734  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1735  return
1736 
1737  def prepare_POSTRECO(self, sequence = None):
1738  """ Enrich the schedule with the postreco step """
1739  self.loadAndRemember(self.POSTRECODefaultCFF)
1740  self.scheduleSequence('postreco_generator','postreco_step')
1741  return
1742 
1743 
1744  def prepare_VALIDATION(self, sequence = 'validation'):
1745  print sequence,"in preparing validation"
1746  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1747  from Validation.Configuration.autoValidation import autoValidation
1748  #in case VALIDATION:something:somethingelse -> something,somethingelse
1749  sequence=sequence.split('.')[-1]
1750  if sequence.find(',')!=-1:
1751  prevalSeqName=sequence.split(',')[0].split('+')
1752  valSeqName=sequence.split(',')[1].split('+')
1753  self.expandMapping(prevalSeqName,autoValidation,index=0)
1754  self.expandMapping(valSeqName,autoValidation,index=1)
1755  else:
1756  if '@' in sequence:
1757  prevalSeqName=sequence.split('+')
1758  valSeqName=sequence.split('+')
1759  self.expandMapping(prevalSeqName,autoValidation,index=0)
1760  self.expandMapping(valSeqName,autoValidation,index=1)
1761  else:
1762  postfix=''
1763  if sequence:
1764  postfix='_'+sequence
1765  prevalSeqName=['prevalidation'+postfix]
1766  valSeqName=['validation'+postfix]
1767  if not hasattr(self.process,valSeqName[0]):
1768  prevalSeqName=['']
1769  valSeqName=[sequence]
1770 
1771  def NFI(index):
1772  ##name from index, required to keep backward compatibility
1773  if index==0:
1774  return ''
1775  else:
1776  return '%s'%index
1777 
1778 
1779  #rename the HLT process in validation steps
1780  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1781  for s in valSeqName+prevalSeqName:
1782  if s:
1784  for (i,s) in enumerate(prevalSeqName):
1785  if s:
1786  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1787  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1788 
1789  for (i,s) in enumerate(valSeqName):
1790  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1791  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1792 
1793  #needed in case the miniAODValidation sequence is run starting from AODSIM
1794  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1795  return
1796 
1797  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1798  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1799  self._options.restoreRNDSeeds=True
1800 
1801  if not 'DIGI' in self.stepMap and not self._options.fast:
1802  self.executeAndRemember("process.mix.playback = True")
1803  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1804  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1805  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1806 
1807  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1808  #will get in the schedule, smoothly
1809  for (i,s) in enumerate(valSeqName):
1810  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1811 
1812  return
1813 
1814 
1816  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1817  It will climb down within PSets, VPSets and VInputTags to find its target"""
        def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
            # paramSearch: the process name to look for; paramReplace: its
            # replacement.  whitelist: parameter names that must never be touched.
            self._paramReplace = paramReplace
            self._paramSearch = paramSearch
            self._verbose = verbose
            self._whitelist = whitelist
1823 
        def doIt(self,pset,base):
            """Recursively scan `pset`; wherever a string value or an
            InputTag/VInputTag process name equals the search value, replace it
            in place.  Whitelisted parameter names are skipped.  `base` is the
            dotted path used only for verbose-mode messages."""
            if isinstance(pset, cms._Parameterizable):
                for name in pset.parameters_().keys():
                    # skip whitelisted parameters
                    if name in self._whitelist:
                        continue
                    # if I use pset.parameters_().items() I get copies of the parameter values
                    # so I can't modify the nested pset
                    value = getattr(pset,name)
                    type = value.pythonTypeName()
                    if type in ('cms.PSet', 'cms.untracked.PSet'):
                        # recurse into nested parameter sets
                        self.doIt(value,base+"."+name)
                    elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
                        for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                    elif type in ('cms.string', 'cms.untracked.string'):
                        if value.value() == self._paramSearch:
                            if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
                            setattr(pset, name,self._paramReplace)
                    elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
                        for (i,n) in enumerate(value):
                            if not isinstance(n, cms.InputTag):
                                n=cms.InputTag(n)
                            if n.processName == self._paramSearch:
                                # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                                if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
                                setattr(n,"processName",self._paramReplace)
                                value[i]=n
                    elif type in ('cms.vstring', 'cms.untracked.vstring'):
                        for (i,n) in enumerate(value):
                            if n==self._paramSearch:
                                getattr(pset,name)[i]=self._paramReplace
                    elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
                        if value.processName == self._paramSearch:
                            if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
                            setattr(getattr(pset, name),"processName",self._paramReplace)
1859 
1860  def enter(self,visitee):
1861  label = ''
1862  try:
1863  label = visitee.label()
1864  except AttributeError:
1865  label = '<Module not in a Process>'
1866  except:
1867  label = 'other execption'
1868  self.doIt(visitee, label)
1869 
        def leave(self,visitee):
            # nothing to do when climbing back out of a nested structure
            pass
1872 
    # visit a sequence to replace all input tags
    def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
        """Replace every InputTag `oldT` by `newT` inside `sequence`, both on
        the live process and in the dumped configuration."""
        print "Replacing all InputTag %s => %s"%(oldT,newT)
        from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
        massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
        # also record the replacement so it is replayed in the generated config
        loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
        if not loadMe in self.additionalCommands:
            self.additionalCommands.append(loadMe)
        self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1882 
    #change the process name used to address HLT results in any sequence
    def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
        """Rewrite every reference to process `HLTprocess` inside `sequence` so
        it points at the actual HLT process name (the --hltProcess option, or
        this process's own name), both live and in the dumped configuration.

        Note: the `proc` argument is always overwritten below.
        """
        if self._options.hltProcess:
            proc=self._options.hltProcess
        else:
            proc=self.process.name_()
        # nothing to do if the target name is already 'HLT'
        if proc==HLTprocess: return
        # look up all module in dqm sequence
        print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
        getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
        # replay the same replacement in the generated python configuration
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1896 
1897 
1898  def expandMapping(self,seqList,mapping,index=None):
1899  maxLevel=20
1900  level=0
1901  while '@' in repr(seqList) and level<maxLevel:
1902  level+=1
1903  for specifiedCommand in seqList:
1904  if specifiedCommand.startswith('@'):
1905  location=specifiedCommand[1:]
1906  if not location in mapping:
1907  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1908  mappedTo=mapping[location]
1909  if index!=None:
1910  mappedTo=mappedTo[index]
1911  seqList.remove(specifiedCommand)
1912  seqList.extend(mappedTo.split('+'))
1913  break;
1914  if level==maxLevel:
1915  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1916 
    def prepare_DQM(self, sequence = 'DQMOffline'):
        # this one needs replacement

        self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
        # autoDQM entries are pairs: index 0 runs in dqmoffline paths,
        # index 1 in the post-PAT dqmofflineOnPAT paths
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            # de-duplicate (note: set() does not preserve the original order)
            sequenceList=list(set(sequenceList))
            print "Duplicate entries for DQM:, using",sequenceList

        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # DQM modules must read trigger results under the real HLT process name
                self.renameHLTprocessInSequence(sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
1953 
    def prepare_HARVESTING(self, sequence = None):
        """Enrich the process with the harvesting step and the DQM saver."""
        self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
        self.loadAndRemember(self.DQMSaverCFF)

        harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which HARVESTING paths to use
        harvestingList = sequence.split("+")
        from DQMOffline.Configuration.autoDQM import autoDQM
        from Validation.Configuration.autoValidation import autoValidation
        import copy
        # harvesting accepts both DQM and validation auto keys; deep-copy so
        # the shared autoDQM dict is not modified by the update below
        combined_mapping = copy.deepcopy( autoDQM )
        combined_mapping.update( autoValidation )
        self.expandMapping(harvestingList,combined_mapping,index=-1)

        if len(set(harvestingList))!=len(harvestingList):
            harvestingList=list(set(harvestingList))
            print "Duplicate entries for HARVESTING, using",harvestingList

        for name in harvestingList:
            if not name in harvestingConfig.__dict__:
                print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
                continue
            harvestingstream = getattr(harvestingConfig,name)
            if isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # scheduled directly: keep it out of the Path dump
                self.blacklist_paths.append(harvestingstream)
            if isinstance(harvestingstream,cms.Sequence):
                # sequences are wrapped into a '<name>_step' Path first
                setattr(self.process,name+"_step",cms.Path(harvestingstream))
                self.schedule.append(getattr(self.process,name+"_step"))

        self.scheduleSequence('DQMSaver','dqmsave_step')
        return
1989 
    def prepare_ALCAHARVEST(self, sequence = None):
        """Enrich the process with the AlCaHarvesting (PCL) step."""
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        sequence=sequence.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # each PCL workflow registers its DB payload and metadata record
                self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left in harvestingList was never matched by the config
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print "The following harvesting could not be found : ", harvestingList
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2017 
2018 
2019 
2020  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2021  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2022  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2023  return
2024 
2026  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2027  self.schedule.append(self.process.reconstruction)
2028 
2029 
2030  def build_production_info(self, evt_type, evtnumber):
2031  """ Add useful info for the production. """
2032  self.process.configurationMetadata=cms.untracked.PSet\
2033  (version=cms.untracked.string("$Revision: 1.19 $"),
2034  name=cms.untracked.string("Applications"),
2035  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2036  )
2037 
2038  self.addedObjects.append(("Production Info","configurationMetadata"))
2039 
2040 
2041  def prepare(self, doChecking = False):
2042  """ Prepare the configuration string and add missing pieces."""
2043 
2044  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2045  self.addMaxEvents()
2046  if self.with_input:
2047  self.addSource()
2048  self.addStandardSequences()
2049  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2050  self.completeInputCommand()
2051  self.addConditions()
2052 
2053 
2054  outputModuleCfgCode=""
2055  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2056  outputModuleCfgCode=self.addOutput()
2057 
2058  self.addCommon()
2059 
2060  self.pythonCfgCode = "# Auto generated configuration file\n"
2061  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2062  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2063  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2064  if hasattr(self._options,"era") and self._options.era :
2065  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2066  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2067  # Multiple eras can be specified in a comma seperated list
2068  for requestedEra in self._options.era.split(",") :
2069  self.pythonCfgCode += ",eras."+requestedEra
2070  self.pythonCfgCode += ")\n\n" # end of the line
2071  else :
2072  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2073 
2074  self.pythonCfgCode += "# import of standard configurations\n"
2075  for module in self.imports:
2076  self.pythonCfgCode += ("process.load('"+module+"')\n")
2077 
2078  # production info
2079  if not hasattr(self.process,"configurationMetadata"):
2080  self.build_production_info(self._options.evt_type, self._options.number)
2081  else:
2082  #the PSet was added via a load
2083  self.addedObjects.append(("Production Info","configurationMetadata"))
2084 
2085  self.pythonCfgCode +="\n"
2086  for comment,object in self.addedObjects:
2087  if comment!="":
2088  self.pythonCfgCode += "\n# "+comment+"\n"
2089  self.pythonCfgCode += dumpPython(self.process,object)
2090 
2091  # dump the output definition
2092  self.pythonCfgCode += "\n# Output definition\n"
2093  self.pythonCfgCode += outputModuleCfgCode
2094 
2095  # dump all additional outputs (e.g. alca or skim streams)
2096  self.pythonCfgCode += "\n# Additional output definition\n"
2097  #I do not understand why the keys are not normally ordered.
2098  nl=self.additionalOutputs.keys()
2099  nl.sort()
2100  for name in nl:
2101  output = self.additionalOutputs[name]
2102  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2103  tmpOut = cms.EndPath(output)
2104  setattr(self.process,name+'OutPath',tmpOut)
2105  self.schedule.append(tmpOut)
2106 
2107  # dump all additional commands
2108  self.pythonCfgCode += "\n# Other statements\n"
2109  for command in self.additionalCommands:
2110  self.pythonCfgCode += command + "\n"
2111 
2112  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2113  for object in self._options.inlineObjets.split(','):
2114  if not object:
2115  continue
2116  if not hasattr(self.process,object):
2117  print 'cannot inline -'+object+'- : not known'
2118  else:
2119  self.pythonCfgCode +='\n'
2120  self.pythonCfgCode +=dumpPython(self.process,object)
2121 
2122  # dump all paths
2123  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2124  for path in self.process.paths:
2125  if getattr(self.process,path) not in self.blacklist_paths:
2126  self.pythonCfgCode += dumpPython(self.process,path)
2127 
2128  for endpath in self.process.endpaths:
2129  if getattr(self.process,endpath) not in self.blacklist_paths:
2130  self.pythonCfgCode += dumpPython(self.process,endpath)
2131 
2132  # dump the schedule
2133  self.pythonCfgCode += "\n# Schedule definition\n"
2134  result = "process.schedule = cms.Schedule("
2135 
2136  # handling of the schedule
2137  self.process.schedule = cms.Schedule()
2138  for item in self.schedule:
2139  if not isinstance(item, cms.Schedule):
2140  self.process.schedule.append(item)
2141  else:
2142  self.process.schedule.extend(item)
2143 
2144  if hasattr(self.process,"HLTSchedule"):
2145  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2146  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2147  pathNames = ['process.'+p.label_() for p in beforeHLT]
2148  result += ','.join(pathNames)+')\n'
2149  result += 'process.schedule.extend(process.HLTSchedule)\n'
2150  pathNames = ['process.'+p.label_() for p in afterHLT]
2151  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2152  else:
2153  pathNames = ['process.'+p.label_() for p in self.schedule]
2154  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2155 
2156  self.pythonCfgCode += result
2157 
2158  if self._options.nThreads is not "1":
2159  self.pythonCfgCode +="\n"
2160  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2161  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2162  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2163  #repacked version
2164  if self._options.isRepacked:
2165  self.pythonCfgCode +="\n"
2166  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2167  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2168  MassReplaceInputTag(self.process)
2169 
2170  # special treatment in case of production filter sequence 2/2
2171  if self.productionFilterSequence:
2172  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2173  self.pythonCfgCode +='for path in process.paths:\n'
2174  if len(self.conditionalPaths):
2175  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2176  if len(self.excludedPaths):
2177  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2178  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2179  pfs = getattr(self.process,self.productionFilterSequence)
2180  for path in self.process.paths:
2181  if not path in self.conditionalPaths: continue
2182  if path in self.excludedPaths: continue
2183  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2184 
2185 
2186  # dump customise fragment
2187  self.pythonCfgCode += self.addCustomise()
2188 
2189  if self._options.runUnscheduled:
2190  # prune and delete paths
2191  #this is not supporting the blacklist at this point since I do not understand it
2192  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2193  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2194  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2195 
2196  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2197  self.process=convertToUnscheduled(self.process)
2198 
2199  #now add the unscheduled stuff
2200  for module in self.importsUnsch:
2201  self.process.load(module)
2202  self.pythonCfgCode += ("process.load('"+module+"')\n")
2203 
2204  #and clean the unscheduled stuff
2205  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2206  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2207 
2208  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2209  self.process=cleanUnscheduled(self.process)
2210 
2211  self.pythonCfgCode += self.addCustomise(1)
2212 
2213  self.pythonCfgCode += self.addCustomiseCmdLine()
2214 
2215  # Temporary hack to put the early delete customization after
2216  # everything else
2217  #
2218  # FIXME: remove when no longer needed
2219  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2220  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2221  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2222  self.pythonCfgCode += "# End adding early deletion\n"
2223  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2224  self.process = customiseEarlyDelete(self.process)
2225 
2226 
2227  # make the .io file
2228 
2229  if self._options.io:
2230  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2231  if not self._options.io.endswith('.io'): self._option.io+='.io'
2232  io=open(self._options.io,'w')
2233  ioJson={}
2234  if hasattr(self.process.source,"fileNames"):
2235  if len(self.process.source.fileNames.value()):
2236  ioJson['primary']=self.process.source.fileNames.value()
2237  if hasattr(self.process.source,"secondaryFileNames"):
2238  if len(self.process.source.secondaryFileNames.value()):
2239  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2240  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2241  ioJson['pileup']=self._options.pileup_input[4:]
2242  for (o,om) in self.process.outputModules_().items():
2243  ioJson[o]=om.fileName.value()
2244  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2245  if self.productionFilterSequence:
2246  ioJson['filter']=self.productionFilterSequence
2247  import json
2248  io.write(json.dumps(ioJson))
2249  return
2250 
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:34
assert(m_qm.get())
def visit
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_RECO
put the filtering path in the schedule
def massSearchReplaceAnyInputTag
Definition: helpers.py:271
def defineMixing
Definition: Mixing.py:171
def addCustomiseCmdLine
now for a useful command
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:49
inliner
needs to be in reverse order
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def convertToUnscheduled
Definition: Utilities.py:92
def cleanUnscheduled
Definition: Utilities.py:130
double split
Definition: MVATrainer.cc:139
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run