ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
30 defaultOptions.harvesting= 'AtRunEnd'
31 defaultOptions.gflash = False
32 defaultOptions.number = -1
33 defaultOptions.number_out = None
34 defaultOptions.arguments = ""
35 defaultOptions.name = "NO NAME GIVEN"
36 defaultOptions.evt_type = ""
37 defaultOptions.filein = ""
38 defaultOptions.dasquery=""
39 defaultOptions.dasoption=""
40 defaultOptions.secondfilein = ""
41 defaultOptions.customisation_file = []
42 defaultOptions.customisation_file_unsch = []
43 defaultOptions.customise_commands = ""
44 defaultOptions.inline_custom=False
45 defaultOptions.particleTable = 'pythiapdt'
46 defaultOptions.particleTableList = ['pythiapdt','pdt']
47 defaultOptions.dirin = ''
48 defaultOptions.dirout = ''
49 defaultOptions.filetype = 'EDM'
50 defaultOptions.fileout = 'output.root'
51 defaultOptions.filtername = ''
52 defaultOptions.lazy_download = False
53 defaultOptions.custom_conditions = ''
54 defaultOptions.hltProcess = ''
55 defaultOptions.eventcontent = None
56 defaultOptions.datatier = None
57 defaultOptions.inlineEventContent = True
58 defaultOptions.inlineObjets =''
59 defaultOptions.hideGen=False
60 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
61 defaultOptions.beamspot=None
62 defaultOptions.outputDefinition =''
63 defaultOptions.inputCommands = None
64 defaultOptions.outputCommands = None
65 defaultOptions.inputEventContent = ''
66 defaultOptions.dropDescendant = False
67 defaultOptions.relval = None
68 defaultOptions.slhc = None
69 defaultOptions.profile = None
70 defaultOptions.isRepacked = False
71 defaultOptions.restoreRNDSeeds = False
72 defaultOptions.donotDropOnInput = ''
73 defaultOptions.python_filename =''
74 defaultOptions.io=None
75 defaultOptions.lumiToProcess=None
76 defaultOptions.fast=False
77 defaultOptions.runsAndWeightsForMC = None
78 defaultOptions.runsScenarioForMC = None
79 defaultOptions.runUnscheduled = False
80 defaultOptions.timeoutOutput = False
81 defaultOptions.nThreads = '1'
82 
83 # some helper routines
84 def dumpPython(process,name):
85  theObject = getattr(process,name)
86  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
87  return "process."+name+" = " + theObject.dumpPython("process")
88  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
89  return "process."+name+" = " + theObject.dumpPython()+"\n"
90  else:
91  return "process."+name+" = " + theObject.dumpPython()+"\n"
92 def filesFromList(fileName,s=None):
93  import os
94  import FWCore.ParameterSet.Config as cms
95  prim=[]
96  sec=[]
97  for line in open(fileName,'r'):
98  if line.count(".root")>=2:
99  #two files solution...
100  entries=line.replace("\n","").split()
101  if not entries[0] in prim:
102  prim.append(entries[0])
103  if not entries[1] in sec:
104  sec.append(entries[1])
105  elif (line.find(".root")!=-1):
106  entry=line.replace("\n","")
107  if not entry in prim:
108  prim.append(entry)
109  if s:
110  if not hasattr(s,"fileNames"):
111  s.fileNames=cms.untracked.vstring(prim)
112  else:
113  s.fileNames.extend(prim)
114  if len(sec)!=0:
115  if not hasattr(s,"secondaryFileNames"):
116  s.secondaryFileNames=cms.untracked.vstring(sec)
117  else:
118  s.secondaryFileNames.extend(sec)
119  print "found files: ",prim
120  if len(prim)==0:
121  raise Exception("There are no files in input from the file list")
122  if len(sec)!=0:
123  print "found parent files:",sec
124  return (prim,sec)
125 
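# Usage sketch for filesFromList (a minimal illustration; the file name
# "input_files.txt" and its one- or two-column "primary [parent]" layout are
# assumptions, not taken from this file):
#
#   src = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
#   prim, sec = filesFromList("input_files.txt", src)
#   # src.fileNames now holds the primary files; src.secondaryFileNames is
#   # created only if a second column (parent files) was present.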
126 def filesFromDASQuery(query,option="",s=None):
127  import os,time
128  import FWCore.ParameterSet.Config as cms
129  prim=[]
130  sec=[]
131  print "the query is",query
132  eC=5
133  count=0
134  while eC!=0 and count<3:
135  if count!=0:
136  print 'Sleeping, then retrying DAS'
137  time.sleep(100)
138  p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
139  pipe=p.stdout.read()
140  tupleP = os.waitpid(p.pid, 0)
141  eC=tupleP[1]
142  count=count+1
143  if eC==0:
144  print "DAS succeeded after",count,"attempts",eC
145  else:
146  print "DAS failed 3 times - I give up"
147  for line in pipe.split('\n'):
148  if line.count(".root")>=2:
149  #two files solution...
150  entries=line.replace("\n","").split()
151  if not entries[0] in prim:
152  prim.append(entries[0])
153  if not entries[1] in sec:
154  sec.append(entries[1])
155  elif (line.find(".root")!=-1):
156  entry=line.replace("\n","")
157  if not entry in prim:
158  prim.append(entry)
159  if s:
160  if not hasattr(s,"fileNames"):
161  s.fileNames=cms.untracked.vstring(prim)
162  else:
163  s.fileNames.extend(prim)
164  if len(sec)!=0:
165  if not hasattr(s,"secondaryFileNames"):
166  s.secondaryFileNames=cms.untracked.vstring(sec)
167  else:
168  s.secondaryFileNames.extend(sec)
169  print "found files: ",prim
170  if len(sec)!=0:
171  print "found parent files:",sec
172  return (prim,sec)
173 
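# Usage sketch for filesFromDASQuery (illustrative; the dataset name is a
# placeholder and dasgoclient must be available in the PATH):
#
#   src = cms.Source("PoolSource", fileNames=cms.untracked.vstring(),
#                    secondaryFileNames=cms.untracked.vstring())
#   filesFromDASQuery('file dataset = /SomePD/SomeEra-v1/RAW', s=src)
#   # the dasgoclient call is retried up to 3 times; on success the resulting
#   # file names are appended to src.fileNames.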
174 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
175  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
176  for s in aProcess.paths_().keys():
177  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
178 
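# Usage sketch for MassReplaceInputTag (illustrative, assuming an existing
# cms.Process called process): replace every use of the "rawDataCollector"
# input tag by "rawDataRepacker" in all paths, e.g. when running on repacked data:
#
#   MassReplaceInputTag(process)                                  # default old/new tags
#   MassReplaceInputTag(process, "oldLabel", "newLabel")          # hypothetical other pair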
179 def anyOf(listOfKeys,dict,opt=None):
180  for k in listOfKeys:
181  if k in dict:
182  toReturn=dict[k]
183  dict.pop(k)
184  return toReturn
185  if opt!=None:
186  return opt
187  else:
188  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output options")
189 
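# Usage sketch for anyOf (illustrative): pick the first matching key out of an
# --output dictionary, popping it so that leftover keys can be flagged later.
# The dictionary content below is an assumption:
#
#   outDef = {'tier': 'AOD', 'e': 'AODSIM'}
#   theTier   = anyOf(['t','tier','dataTier'], outDef)       # -> 'AOD', key removed
#   theStream = anyOf(['e','ec','eventContent'], outDef)     # -> 'AODSIM'
#   theFilter = anyOf(['f','ftN','filterName'], outDef, '')  # -> '' (default, no raise)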
190 class ConfigBuilder(object):
191  """The main building routines """
192 
193  def __init__(self, options, process = None, with_output = False, with_input = False ):
194  """options taken from old cmsDriver and optparse """
195 
196  options.outfile_name = options.dirout+options.fileout
197 
198  self._options = options
199 
200  if self._options.isData and options.isMC:
201  raise Exception("ERROR: You may specify only --data or --mc, not both")
202  #if not self._options.conditions:
203  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
204 
205  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
206  if 'ENDJOB' in self._options.step:
207  if (hasattr(self._options,"outputDefinition") and \
208  self._options.outputDefinition != '' and \
209  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
210  (hasattr(self._options,"datatier") and \
211  self._options.datatier and \
212  'DQMIO' in self._options.datatier):
213  print "removing ENDJOB from steps since it is not compatible with the DQMIO dataTier"
214  self._options.step=self._options.step.replace(',ENDJOB','')
215 
216 
217 
218  # what steps are provided by this class?
219  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
220  self.stepMap={}
221  self.stepKeys=[]
222  for step in self._options.step.split(","):
223  if step=='': continue
224  stepParts = step.split(":")
225  stepName = stepParts[0]
226  if stepName not in stepList and not stepName.startswith('re'):
227  raise ValueError("Step "+stepName+" unknown")
228  if len(stepParts)==1:
229  self.stepMap[stepName]=""
230  elif len(stepParts)==2:
231  self.stepMap[stepName]=stepParts[1].split('+')
232  elif len(stepParts)==3:
233  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
234  else:
235  raise ValueError("Step definition "+step+" invalid")
236  self.stepKeys.append(stepName)
237 
238  #print "map of steps is:",self.stepMap
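 # Illustration of the step parsing above (the option string is an example):
 #   --step GEN,SIM,RECO:localreco+globalreco
 # yields self.stepMap == {'GEN': '', 'SIM': '', 'RECO': ['localreco','globalreco']}
 # and self.stepKeys == ['GEN','SIM','RECO']; a three-part spec "STEP:cff:a+b"
 # is stored as (['a','b'], 'cff'), i.e. the sequences plus the overriding cff file.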
239 
240  self.with_output = with_output
241  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
242  self.with_output = False
243  self.with_input = with_input
244  if process == None:
245  self.process = cms.Process(self._options.name)
246  else:
247  self.process = process
248  self.imports = []
249  self.importsUnsch = []
250  self.define_Configs()
251  self.schedule = list()
252 
253  # we are doing three things here:
254  # creating a process to catch errors
255  # building the code to re-create the process
256 
257  self.additionalCommands = []
258  # TODO: maybe a list of to be dumped objects would help as well
259  self.blacklist_paths = []
260  self.addedObjects = []
261  self.additionalOutputs = {}
262 
263  self.productionFilterSequence = None
264  self.nextScheduleIsConditional=False
265  self.conditionalPaths=[]
266  self.excludedPaths=[]
267 
268  def profileOptions(self):
269  """
270  addIgProfService
271  Function to add the igprof profile service so that you can dump in the middle
272  of the run.
273  """
274  profileOpts = self._options.profile.split(':')
275  profilerStart = 1
276  profilerInterval = 100
277  profilerFormat = None
278  profilerJobFormat = None
279 
280  if len(profileOpts):
281  #type, given as first argument is unused here
282  profileOpts.pop(0)
283  if len(profileOpts):
284  startEvent = profileOpts.pop(0)
285  if not startEvent.isdigit():
286  raise Exception("%s is not a number" % startEvent)
287  profilerStart = int(startEvent)
288  if len(profileOpts):
289  eventInterval = profileOpts.pop(0)
290  if not eventInterval.isdigit():
291  raise Exception("%s is not a number" % eventInterval)
292  profilerInterval = int(eventInterval)
293  if len(profileOpts):
294  profilerFormat = profileOpts.pop(0)
295 
296 
297  if not profilerFormat:
298  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
299  self._options.step,
300  self._options.pileup,
301  self._options.conditions,
302  self._options.datatier,
303  self._options.profileTypeLabel)
304  if not profilerJobFormat and profilerFormat.endswith(".gz"):
305  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
306  elif not profilerJobFormat:
307  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
308 
309  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
310 
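# Sketch of how profileOptions() decodes --profile (values are examples): a
# specification like "pp:101:500:myjob" is split on ':' into
#   type (ignored here), first event = 101, event interval = 500,
#   report file format = "myjob" -> end-of-job report "myjob_EndOfJob.gz";
# non-numeric start/interval fields raise an Exception.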
311  def load(self,includeFile):
312  includeFile = includeFile.replace('/','.')
313  self.process.load(includeFile)
314  return sys.modules[includeFile]
315 
316  def loadAndRemember(self, includeFile,unsch=0):
317  """helper routine to load and memorize imports"""
318  # we could make the imports an on-the-fly data method of the process instance itself
319  # not sure if the latter is a good idea
320  includeFile = includeFile.replace('/','.')
321  if unsch==0:
322  self.imports.append(includeFile)
323  self.process.load(includeFile)
324  return sys.modules[includeFile]
325  else:
326  self.importsUnsch.append(includeFile)
327  return 0#sys.modules[includeFile]
328 
329  def executeAndRemember(self, command):
330  """helper routine to remember replace statements"""
331  self.additionalCommands.append(command)
332  if not command.strip().startswith("#"):
333  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
334  import re
335  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
336  #exec(command.replace("process.","self.process."))
337 
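 # Sketch of executeAndRemember (the command below is an example): the string
 # is both recorded for the dumped configuration and executed immediately on
 # self.process, with the bare "process." prefix rewritten to "self.process.":
 #
 #   self.executeAndRemember('process.maxEvents.input = cms.untracked.int32(10)')
 #   # -> appended to self.additionalCommands and applied to the live process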
338  def addCommon(self):
339  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
340  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
341  else:
342  self.process.options = cms.untracked.PSet( )
343 
344  if self._options.runUnscheduled:
345  self.process.options.allowUnscheduled=cms.untracked.bool(True)
346 
347  self.addedObjects.append(("","options"))
348 
349  if self._options.lazy_download:
350  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
351  stats = cms.untracked.bool(True),
352  enable = cms.untracked.bool(True),
353  cacheHint = cms.untracked.string("lazy-download"),
354  readHint = cms.untracked.string("read-ahead-buffered")
355  )
356  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
357 
358  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
359  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
360 
361  if self._options.profile:
362  (start, interval, eventFormat, jobFormat)=self.profileOptions()
363  self.process.IgProfService = cms.Service("IgProfService",
364  reportFirstEvent = cms.untracked.int32(start),
365  reportEventInterval = cms.untracked.int32(interval),
366  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
367  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
368  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
369 
370  def addMaxEvents(self):
371  """Here we decide how many events will be processed"""
372  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
373  if self._options.number_out:
374  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
375  self.addedObjects.append(("","maxEvents"))
376 
377  def addSource(self):
378  """Here the source is built. Priority: file, generator"""
379  self.addedObjects.append(("Input source","source"))
380 
381  def filesFromOption(self):
382  for entry in self._options.filein.split(','):
383  print "entry",entry
384  if entry.startswith("filelist:"):
385  filesFromList(entry[9:],self.process.source)
386  elif entry.startswith("dbs:") or entry.startswith("das:"):
387  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
388  else:
389  self.process.source.fileNames.append(self._options.dirin+entry)
390  if self._options.secondfilein:
391  if not hasattr(self.process.source,"secondaryFileNames"):
392  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
393  for entry in self._options.secondfilein.split(','):
394  print "entry",entry
395  if entry.startswith("filelist:"):
396  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
397  elif entry.startswith("dbs:") or entry.startswith("das:"):
398  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
399  else:
400  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
401 
402  if self._options.filein or self._options.dasquery:
403  if self._options.filetype == "EDM":
404  self.process.source=cms.Source("PoolSource",
405  fileNames = cms.untracked.vstring(),
406  secondaryFileNames= cms.untracked.vstring())
407  filesFromOption(self)
408  elif self._options.filetype == "DAT":
409  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
410  filesFromOption(self)
411  elif self._options.filetype == "LHE":
412  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
413  if self._options.filein.startswith("lhe:"):
414  #list the article directory automatically
415  args=self._options.filein.split(':')
416  article=args[1]
417  print 'LHE input from article ',article
418  location='/store/lhe/'
419  import os
420  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
421  for line in textOfFiles:
422  for fileName in [x for x in line.split() if '.lhe' in x]:
423  self.process.source.fileNames.append(location+article+'/'+fileName)
424  #check first if list of LHE files is loaded (not empty)
425  if len(line)<2:
426  print 'Failed to load the list of LHE files, please check and try again.'
427  sys.exit(-1)
428  if len(args)>2:
429  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
430  else:
431  filesFromOption(self)
432 
433 
434  elif self._options.filetype == "DQM":
435  self.process.source=cms.Source("DQMRootSource",
436  fileNames = cms.untracked.vstring())
437  filesFromOption(self)
438 
439  elif self._options.filetype == "DQMDAQ":
440  # FIXME: how to configure it if there are no input files specified?
441  self.process.source=cms.Source("DQMStreamerReader")
442 
443 
444  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
445  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
446 
447  if self._options.dasquery!='':
448  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
449  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
450 
451  ##drop LHEXMLStringProduct on input to save memory if appropriate
452  if 'GEN' in self.stepMap.keys():
453  if self._options.inputCommands:
454  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
455  else:
456  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
457 
458  if self.process.source and self._options.inputCommands:
459  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
460  for command in self._options.inputCommands.split(','):
461  # remove whitespace around the keep/drop statements
462  command = command.strip()
463  if command=='': continue
464  self.process.source.inputCommands.append(command)
465  if not self._options.dropDescendant:
466  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
467 
468  if self._options.lumiToProcess:
469  import FWCore.PythonUtilities.LumiList as LumiList
470  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
471 
472  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
473  if self.process.source is None:
474  self.process.source=cms.Source("EmptySource")
475 
476  # modify source in case of run-dependent MC
477  self.runsAndWeights=None
478  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
479  if not self._options.isMC :
480  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
481  if self._options.runsAndWeightsForMC:
482  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
483  else:
484  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
485  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
486  __import__(RunsAndWeights[self._options.runsScenarioForMC])
487  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
488  else:
489  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
490 
491  if self.runsAndWeights:
492  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
493  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
494  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
495  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
496 
497  return
498 
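 # Sketch of the --filein forms handled above (all values are placeholders):
 #   --filein file1.root,file2.root          plain EDM files, prefixed with --dirin
 #   --filein filelist:files.txt             file names read via filesFromList
 #   --filein das:/SomePD/SomeEra-v1/RAW     files resolved via filesFromDASQuery
 #   --filein lhe:12345                      /store/lhe/12345 listed with cmsLHEtoEOSManager.py
 # The cms.Source type itself follows --filetype (EDM, DAT, LHE, DQM, DQMDAQ).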
499  def addOutput(self):
500  """ Add output module to the process """
501  result=""
502  if self._options.outputDefinition:
503  if self._options.datatier:
504  print "--datatier & --eventcontent options ignored"
505 
506  #new output convention with a list of dict
507  outList = eval(self._options.outputDefinition)
508  for (id,outDefDict) in enumerate(outList):
509  outDefDictStr=outDefDict.__str__()
510  if not isinstance(outDefDict,dict):
511  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
512  #requires option: tier
513  theTier=anyOf(['t','tier','dataTier'],outDefDict)
514  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
515  ## event content
516  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
517  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
518  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
519  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
520  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
521  # module label has a particular role
522  if not theModuleLabel:
523  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
524  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
525  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
526  ]
527  for name in tryNames:
528  if not hasattr(self.process,name):
529  theModuleLabel=name
530  break
531  if not theModuleLabel:
532  raise Exception("cannot find a module label for specification: "+outDefDictStr)
533  if id==0:
534  defaultFileName=self._options.outfile_name
535  else:
536  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
537 
538  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
539  if not theFileName.endswith('.root'):
540  theFileName+='.root'
541 
542  if len(outDefDict.keys()):
543  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
544  if theStreamType=='DQMIO': theStreamType='DQM'
545  if theStreamType=='ALL':
546  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
547  else:
548  theEventContent = getattr(self.process, theStreamType+"EventContent")
549 
550 
551  addAlCaSelects=False
552  if theStreamType=='ALCARECO' and not theFilterName:
553  theFilterName='StreamALCACombined'
554  addAlCaSelects=True
555 
556  CppType='PoolOutputModule'
557  if self._options.timeoutOutput:
558  CppType='TimeoutPoolOutputModule'
559  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
560  output = cms.OutputModule(CppType,
561  theEventContent.clone(),
562  fileName = cms.untracked.string(theFileName),
563  dataset = cms.untracked.PSet(
564  dataTier = cms.untracked.string(theTier),
565  filterName = cms.untracked.string(theFilterName))
566  )
567  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
568  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
569  if not theSelectEvent and hasattr(self.process,'filtering_step'):
570  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
571  if theSelectEvent:
572  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
573 
574  if addAlCaSelects:
575  if not hasattr(output,'SelectEvents'):
576  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
577  for alca in self.AlCaPaths:
578  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
579 
580 
581  if hasattr(self.process,theModuleLabel):
582  raise Exception("the current process already has a module "+theModuleLabel+" defined")
583  #print "creating output module ",theModuleLabel
584  setattr(self.process,theModuleLabel,output)
585  outputModule=getattr(self.process,theModuleLabel)
586  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
587  path=getattr(self.process,theModuleLabel+'_step')
588  self.schedule.append(path)
589 
590  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
591  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
592  return label
593  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
594  if theExtraOutputCommands:
595  if not isinstance(theExtraOutputCommands,list):
596  raise Exception("extra output commands in --output must be a list of strings")
597  if hasattr(self.process,theStreamType+"EventContent"):
598  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
599  else:
600  outputModule.outputCommands.extend(theExtraOutputCommands)
601 
602  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
603 
604  ##ends the --output options model
605  return result
606 
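 # Sketch of the --output syntax consumed above (the content is an example):
 # a python list of dicts, one per output module, e.g.
 #   --output '[{"t":"RECO", "e":"RECO", "f":"myFilter"},
 #              {"tier":"DQMIO", "eventContent":"DQM"}]'
 # Only the tier key is mandatory; unused keys raise an Exception, and the
 # module label defaults to something like "RECOoutput" when not given.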
607  streamTypes=self._options.eventcontent.split(',')
608  tiers=self._options.datatier.split(',')
609  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
610  raise Exception("number of event content arguments does not match number of datatier arguments")
611 
612  # if the only step is alca we don't need to put in an output
613  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
614  return "\n"
615 
616  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
617  if streamType=='': continue
618  if streamType=='DQMIO': streamType='DQM'
619  theEventContent = getattr(self.process, streamType+"EventContent")
620  if i==0:
621  theFileName=self._options.outfile_name
622  theFilterName=self._options.filtername
623  else:
624  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
625  theFilterName=self._options.filtername
626  CppType='PoolOutputModule'
627  if self._options.timeoutOutput:
628  CppType='TimeoutPoolOutputModule'
629  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
630  output = cms.OutputModule(CppType,
631  theEventContent,
632  fileName = cms.untracked.string(theFileName),
633  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
634  filterName = cms.untracked.string(theFilterName)
635  )
636  )
637  if hasattr(self.process,"generation_step") and streamType!='LHE':
638  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
639  if hasattr(self.process,"filtering_step"):
640  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
641 
642  if streamType=='ALCARECO':
643  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
644 
645  if "MINIAOD" in streamType:
646  output.dropMetaData = cms.untracked.string('ALL')
647  output.fastCloning= cms.untracked.bool(False)
648  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
649 
650  outputModuleName=streamType+'output'
651  setattr(self.process,outputModuleName,output)
652  outputModule=getattr(self.process,outputModuleName)
653  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
654  path=getattr(self.process,outputModuleName+'_step')
655  self.schedule.append(path)
656 
657  if self._options.outputCommands and streamType!='DQM':
658  for evct in self._options.outputCommands.split(','):
659  if not evct: continue
660  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
661 
662  if not self._options.inlineEventContent:
663  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
664  return label
665  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
666 
667  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
668 
669  return result
670 
671  def addStandardSequences(self):
672  """
673  Add selected standard sequences to the process
674  """
675  # load the pile up file
676  if self._options.pileup:
677  pileupSpec=self._options.pileup.split(',')[0]
678 
679  # Does the requested pile-up scenario exist?
680  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
681  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
682  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
683  raise Exception(message)
684 
685  # Put mixing parameters in a dictionary
686  if '.' in pileupSpec:
687  mixingDict={'file':pileupSpec}
688  elif pileupSpec.startswith('file:'):
689  mixingDict={'file':pileupSpec[5:]}
690  else:
691  import copy
692  mixingDict=copy.copy(Mixing[pileupSpec])
693  if len(self._options.pileup.split(','))>1:
694  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
695 
696  # Load the pu cfg file corresponding to the requested pu scenario
697  if 'file:' in pileupSpec:
698  #the file is local
699  self.process.load(mixingDict['file'])
700  print "inlining mixing module configuration"
701  self._options.inlineObjets+=',mix'
702  else:
703  self.loadAndRemember(mixingDict['file'])
704 
705  mixingDict.pop('file')
706  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
707  if self._options.pileup_input:
708  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
709  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
710  else:
711  mixingDict['F']=self._options.pileup_input.split(',')
712  specialization=defineMixing(mixingDict)
713  for command in specialization:
714  self.executeAndRemember(command)
715  if len(mixingDict)!=0:
716  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
717 
718 
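 # Sketch of the --pileup specification handled above (values are examples):
 #   --pileup NoPileUp                          a key of Configuration.StandardSequences.Mixing
 #   --pileup file:SimGeneral.MixingModule.mix  an explicit mixing config, inlined (name is an example)
 #   --pileup <scenario>,{'N': 20}              a known scenario plus parameter overrides for defineMixing
 # --pileup_input then fills the 'F' entry (the list of minbias files), either
 # from a comma separated list or from a das:/dbs: dataset query.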
719  # load the geometry file
720  try:
721  if len(self.stepMap):
722  self.loadAndRemember(self.GeometryCFF)
723  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
724  self.loadAndRemember(self.SimGeometryCFF)
725  if self.geometryDBLabel:
726  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
727  except ImportError:
728  print "Geometry option",self._options.geometry,"unknown."
729  raise
730 
731  if len(self.stepMap):
732  self.loadAndRemember(self.magFieldCFF)
733 
734  for stepName in self.stepKeys:
735  stepSpec = self.stepMap[stepName]
736  print "Step:", stepName,"Spec:",stepSpec
737  if stepName.startswith('re'):
738  ##add the corresponding input content
739  if stepName[2:] not in self._options.donotDropOnInput:
740  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
741  stepName=stepName[2:]
742  if stepSpec=="":
743  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
744  elif type(stepSpec)==list:
745  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
746  elif type(stepSpec)==tuple:
747  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
748  else:
749  raise ValueError("Invalid step definition")
750 
751  if self._options.restoreRNDSeeds!=False:
752  #it is either True, or a process name
753  if self._options.restoreRNDSeeds==True:
754  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
755  else:
756  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
757  if self._options.inputEventContent or self._options.inputCommands:
758  if self._options.inputCommands:
759  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
760  else:
761  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
762 
763 
765  if self._options.inputEventContent:
766  import copy
767  def dropSecondDropStar(iec):
768  #drop duplicate occurrences of 'drop *' in the list, keeping only the first
769  count=0
770  for item in iec:
771  if item=='drop *':
772  if count!=0:
773  iec.remove(item)
774  count+=1
775 
776 
777  ## allow comma separated input eventcontent
778  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
779  for evct in self._options.inputEventContent.split(','):
780  if evct=='': continue
781  theEventContent = getattr(self.process, evct+"EventContent")
782  if hasattr(theEventContent,'outputCommands'):
783  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
784  if hasattr(theEventContent,'inputCommands'):
785  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
786 
787  dropSecondDropStar(self.process.source.inputCommands)
788 
789  if not self._options.dropDescendant:
790  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
791 
792 
793  return
794 
795  def addConditions(self):
796  """Add conditions to the process"""
797  if not self._options.conditions: return
798 
799  if 'FrontierConditions_GlobalTag' in self._options.conditions:
800  print 'using FrontierConditions_GlobalTag in --conditions is no longer necessary and will be deprecated soon; please update your command line'
801  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
802 
803  self.loadAndRemember(self.ConditionsDefaultCFF)
804  from Configuration.AlCa.GlobalTag import GlobalTag
805  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
806  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
807  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
808 
809  if self._options.slhc:
810  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
811 
812 
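 # Sketch of the configuration fragment recorded by addConditions() (the
 # conditions key below is a placeholder):
 #
 #   from Configuration.AlCa.GlobalTag import GlobalTag
 #   process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_data', '')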
813  def addCustomise(self,unsch=0):
814  """Include the customise code """
815 
816  custOpt=[]
817  if unsch==0:
818  for c in self._options.customisation_file:
819  custOpt.extend(c.split(","))
820  else:
821  for c in self._options.customisation_file_unsch:
822  custOpt.extend(c.split(","))
823 
824  custMap=DictTypes.SortedKeysDict()
825  for opt in custOpt:
826  if opt=='': continue
827  if opt.count('.')>1:
828  raise Exception("more than one '.' in the specification: "+opt)
829  fileName=opt.split('.')[0]
830  if opt.count('.')==0: rest='customise'
831  else:
832  rest=opt.split('.')[1]
833  if rest=='py': rest='customise' #catch the case of --customise file.py
834 
835  if fileName in custMap:
836  custMap[fileName].extend(rest.split('+'))
837  else:
838  custMap[fileName]=rest.split('+')
839 
840  if len(custMap)==0:
841  final_snippet='\n'
842  else:
843  final_snippet='\n# customisation of the process.\n'
844 
845  allFcn=[]
846  for opt in custMap:
847  allFcn.extend(custMap[opt])
848  for fcn in allFcn:
849  if allFcn.count(fcn)!=1:
850  raise Exception("cannot specify "+fcn+" twice as a customisation method")
851 
852  for f in custMap:
853  # let python search for that package and do syntax checking at the same time
854  packageName = f.replace(".py","").replace("/",".")
855  __import__(packageName)
856  package = sys.modules[packageName]
857 
858  # now ask the package for its definition and pick .py instead of .pyc
859  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
860 
861  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
862  if self._options.inline_custom:
863  for line in file(customiseFile,'r'):
864  if "import FWCore.ParameterSet.Config" in line:
865  continue
866  final_snippet += line
867  else:
868  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
869  for fcn in custMap[f]:
870  print "customising the process with",fcn,"from",f
871  if not hasattr(package,fcn):
872  #bound to fail at run time
873  raise Exception("config "+f+" has no function "+fcn)
874  #execute the command
875  self.process=getattr(package,fcn)(self.process)
876  #and print it in the configuration
877  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
878  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
879 
880  if len(custMap)!=0:
881  final_snippet += '\n# End of customisation functions\n'
882 
883  ### now for a useful command
884  if unsch==1 or not self._options.runUnscheduled:
885  if self._options.customise_commands:
886  import string
887  final_snippet +='\n# Customisation from command line'
888  for com in self._options.customise_commands.split('\\n'):
889  com=string.lstrip(com)
890  self.executeAndRemember(com)
891  final_snippet +='\n'+com
892 
893  return final_snippet
894 
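 # Sketch of the --customise syntax digested above (file and function names are
 # examples): entries of the form "package/module.function", several functions
 # joined with '+', several files given as repeated or comma separated options:
 #
 #   --customise SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1
 #   --customise MyPkg/MyCustoms.tweakA+tweakB        (hypothetical file and functions)
 # Each function is imported, applied as process = fcn(process), and the call is
 # echoed into the dumped configuration; --customise_commands appends raw lines.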
895  #----------------------------------------------------------------------------
896  # here the methods to define the python includes for each step or
897  # conditions
898  #----------------------------------------------------------------------------
899  def define_Configs(self):
900  if len(self.stepMap):
901  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
902  if self._options.particleTable not in defaultOptions.particleTableList:
903  print 'Invalid particle table provided. Options are:'
904  print defaultOptions.particleTableList
905  sys.exit(-1)
906  else:
907  if len(self.stepMap):
908  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
909 
910  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
911 
912  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
913  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
914  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
915  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
916  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
917  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
918  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
919  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
920  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
921  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
922  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
923  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
924  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
925  self.EIDefaultCFF=None
926  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
927  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
928  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
929  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
930  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
931  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
932  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
933  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
934  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
935  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
936  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
937 
938  if "DATAMIX" in self.stepMap.keys():
939  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
940  if self._options.datamix == 'PreMix':
941  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
942  else:
943  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
944  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
945  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
946 
947  if "DIGIPREMIX" in self.stepMap.keys():
948  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
949 
950  self.ALCADefaultSeq=None
951  self.LHEDefaultSeq='externalLHEProducer'
952  self.GENDefaultSeq='pgen'
953  self.SIMDefaultSeq='psim'
954  self.DIGIDefaultSeq='pdigi'
955  self.DIGIPREMIXDefaultSeq='pdigi'
956  self.DIGIPREMIX_S2DefaultSeq='pdigi'
957  self.DATAMIXDefaultSeq=None
958  self.DIGI2RAWDefaultSeq='DigiToRaw'
959  self.HLTDefaultSeq='GRun'
960  self.L1DefaultSeq=None
961  self.L1REPACKDefaultSeq='GT'
962  self.HARVESTINGDefaultSeq=None
963  self.ALCAHARVESTDefaultSeq=None
964  self.CFWRITERDefaultSeq=None
965  self.RAW2DIGIDefaultSeq='RawToDigi'
966  self.L1RecoDefaultSeq='L1Reco'
967  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
968  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
969  self.RECODefaultSeq='reconstruction'
970  else:
971  self.RECODefaultSeq='reconstruction_fromRECO'
972 
973  self.EIDefaultSeq='top'
974  self.POSTRECODefaultSeq=None
975  self.L1HwValDefaultSeq='L1HwVal'
976  self.DQMDefaultSeq='DQMOffline'
977  self.VALIDATIONDefaultSeq=''
978  self.ENDJOBDefaultSeq='endOfProcess'
979  self.REPACKDefaultSeq='DigiToRawRepack'
980  self.PATDefaultSeq='miniAOD'
981 
982  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
983 
984  if not self._options.beamspot:
985  self._options.beamspot=VtxSmearedDefaultKey
986 
987  # if its MC then change the raw2digi
988  if self._options.isMC==True:
989  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
990  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
991  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
992  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
993  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
994  else:
995  self._options.beamspot = None
996 
997  #patch for gen, due to backward incompatibility
998  if 'reGEN' in self.stepMap:
999  self.GENDefaultSeq='fixGenInfo'
1000 
1001  if self._options.scenario=='cosmics':
1002  self._options.pileup='Cosmics'
1003  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1004  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1005  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1006  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1007  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1008  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1009  if self._options.isMC==True:
1010  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1011  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1012  self.RECODefaultSeq='reconstructionCosmics'
1013  self.DQMDefaultSeq='DQMOfflineCosmics'
1014 
1015  if self._options.scenario=='HeavyIons':
1016  if not self._options.beamspot:
1017  self._options.beamspot=VtxSmearedHIDefaultKey
1018  self.HLTDefaultSeq = 'HIon'
1019  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1020  self.VALIDATIONDefaultSeq=''
1021  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1022  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1023  self.RECODefaultSeq='reconstructionHeavyIons'
1024  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1025  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1026  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1027  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1028  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1029  if self._options.isMC==True:
1030  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1031 
1032 
1033  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1034 
1035  self.USERDefaultSeq='user'
1036  self.USERDefaultCFF=None
1037 
1038  # the magnetic field
1039  if self._options.isData:
1040  if self._options.magField==defaultOptions.magField:
1041  print "magnetic field option forced to: AutoFromDBCurrent"
1042  self._options.magField='AutoFromDBCurrent'
1043  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1044  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1045 
1046  # the geometry
1047  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1048  self.geometryDBLabel=None
1049  simGeometry=''
1050  if self._options.fast:
1051  if 'start' in self._options.conditions.lower():
1052  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1053  else:
1054  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1055  else:
1056  def inGeometryKeys(opt):
1057  from Configuration.StandardSequences.GeometryConf import GeometryConf
1058  if opt in GeometryConf:
1059  return GeometryConf[opt]
1060  else:
1061  return opt
1062 
1063  geoms=self._options.geometry.split(',')
1064  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1065  if len(geoms)==2:
1066  #may specify the reco geometry
1067  if '/' in geoms[1] or '_cff' in geoms[1]:
1068  self.GeometryCFF=geoms[1]
1069  else:
1070  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1071 
1072  if (geoms[0].startswith('DB:')):
1073  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1074  self.geometryDBLabel=geoms[0][3:]
1075  print "with DB:"
1076  else:
1077  if '/' in geoms[0] or '_cff' in geoms[0]:
1078  self.SimGeometryCFF=geoms[0]
1079  else:
1080  simGeometry=geoms[0]
1081  if self._options.gflash==True:
1082  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1083  else:
1084  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1085 
1086  # synchronize the geometry configuration and the FullSimulation sequence to be used
1087  if simGeometry not in defaultOptions.geometryExtendedOptions:
1088  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1089 
1090  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1091  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1092  self._options.beamspot='NoSmear'
1093 
1094  # fastsim requires some changes to the default cff files and sequences
1095  if self._options.fast:
1096  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1097  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1098  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1099  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1100  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1101  self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"
1102 
1103  # Mixing
1104  if self._options.pileup=='default':
1105  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1106  self._options.pileup=MixingDefaultKey
1107 
1108 
1109  #not driven by a default cff anymore
1110  if self._options.isData:
1111  self._options.pileup=None
1112 
1113  if self._options.slhc:
1114  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1115  if 'stdgeom' not in self._options.slhc:
1116  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1117  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1118  if self._options.pileup!=defaultOptions.pileup:
1119  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1120 
1121  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1122 
1123  # for alca, skims, etc
1124  def addExtraStream(self,name,stream,workflow='full'):
1125  # define output module and go from there
1126  output = cms.OutputModule("PoolOutputModule")
1127  if len(stream.selectEvents.parameters_())!=0:
1128  output.SelectEvents = stream.selectEvents
1129  else:
1130  output.SelectEvents = cms.untracked.PSet()
1131  output.SelectEvents.SelectEvents=cms.vstring()
1132  if isinstance(stream.paths,tuple):
1133  for path in stream.paths:
1134  output.SelectEvents.SelectEvents.append(path.label())
1135  else:
1136  output.SelectEvents.SelectEvents.append(stream.paths.label())
1137 
1138 
1139 
1140  if isinstance(stream.content,str):
1141  evtPset=getattr(self.process,stream.content)
1142  for p in evtPset.parameters_():
1143  setattr(output,p,getattr(evtPset,p))
1144  if not self._options.inlineEventContent:
1145  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1146  return label
1147  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1148  else:
1149  output.outputCommands = stream.content
1150 
1151 
1152  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1153 
1154  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1155  filterName = cms.untracked.string(stream.name))
1156 
1157  if self._options.filtername:
1158  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1159 
1160  #add an automatic flushing to limit memory consumption
1161  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1162 
1163  if workflow in ("producers","full"):
1164  if isinstance(stream.paths,tuple):
1165  for path in stream.paths:
1166  self.schedule.append(path)
1167  else:
1168  self.schedule.append(stream.paths)
1169 
1170 
1171  # in case of relvals we don't want to have additional outputs
1172  if (not self._options.relval) and workflow in ("full","output"):
1173  self.additionalOutputs[name] = output
1174  setattr(self.process,name,output)
1175 
1176  if workflow == 'output':
1177  # adjust the select events to the proper trigger results from previous process
1178  filterList = output.SelectEvents.SelectEvents
1179  for i, filter in enumerate(filterList):
1180  filterList[i] = filter+":"+self._options.triggerResultsProcess
1181 
1182  return output
1183 
1184  #----------------------------------------------------------------------------
1185  # here the methods to create the steps. Of course we are doing magic here ;)
1186  # prepare_STEPNAME modifies self.process and what else's needed.
1187  #----------------------------------------------------------------------------
1188 
1189  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1190  if ( len(sequence.split('.'))==1 ):
1191  l=self.loadAndRemember(defaultCFF,unsch)
1192  elif ( len(sequence.split('.'))==2 ):
1193  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1194  sequence=sequence.split('.')[1]
1195  else:
1196  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1197  print sequence,"not recognized"
1198  raise Exception("invalid sub-sequence specification: "+sequence)
1199  return l
1200 
1201  def scheduleSequence(self,seq,prefix,what='Path'):
1202  if '*' in seq:
1203  #create only one path with all sequences in it
1204  for i,s in enumerate(seq.split('*')):
1205  if i==0:
1206  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1207  else:
1208  p=getattr(self.process,prefix)
1209  p+=getattr(self.process, s)
1210  self.schedule.append(getattr(self.process,prefix))
1211  return
1212  else:
1213  #create as many path as many sequences
1214  if not '+' in seq:
1215  if self.nextScheduleIsConditional:
1216  self.conditionalPaths.append(prefix)
1217  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1218  self.schedule.append(getattr(self.process,prefix))
1219  else:
1220  for i,s in enumerate(seq.split('+')):
1221  sn=prefix+'%d'%(i)
1222  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1223  self.schedule.append(getattr(self.process,sn))
1224  return
1225 
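 # Sketch of scheduleSequence (sequence names are examples): a '+' separated
 # spec creates one Path per sequence, while '*' concatenates them in a single Path:
 #
 #   self.scheduleSequence('siPixelDigis+siStripDigis', 'raw2digi_step')
 #   # -> process.raw2digi_step0 and process.raw2digi_step1, both appended to the schedule
 #   self.scheduleSequence('pdigi*DigiToRaw', 'digi2raw_step')
 #   # -> a single process.digi2raw_step containing both sequences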
1226  def scheduleSequenceAtEnd(self,seq,prefix):
1227  self.scheduleSequence(seq,prefix,what='EndPath')
1228  return
1229 
1230  def prepare_ALCAPRODUCER(self, sequence = None):
1231  self.prepare_ALCA(sequence, workflow = "producers")
1232 
1233  def prepare_ALCAOUTPUT(self, sequence = None):
1234  self.prepare_ALCA(sequence, workflow = "output")
1235 
1236  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1237  """ Enrich the process with alca streams """
1238  print 'enriching the process with ALCA streams:',workflow,sequence
1239  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1240  sequence = sequence.split('.')[-1]
1241 
1242  # decide which ALCA paths to use
1243  alcaList = sequence.split("+")
1244  maxLevel=0
1245  from Configuration.AlCa.autoAlca import autoAlca
1246  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1247  self.expandMapping(alcaList,autoAlca)
1248  self.AlCaPaths=[]
1249  for name in alcaConfig.__dict__:
1250  alcastream = getattr(alcaConfig,name)
1251  shortName = name.replace('ALCARECOStream','')
1252  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1253  output = self.addExtraStream(name,alcastream, workflow = workflow)
1254  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1255  self.AlCaPaths.append(shortName)
1256  if 'DQM' in alcaList:
1257  if not self._options.inlineEventContent and hasattr(self.process,name):
1258  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1259  else:
1260  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1261 
1262  #rename the HLT process name in the alca modules
1263  if self._options.hltProcess or 'HLT' in self.stepMap:
1264  if isinstance(alcastream.paths,tuple):
1265  for path in alcastream.paths:
1266  self.renameHLTprocessInSequence(path.label())
1267  else:
1268  self.renameHLTprocessInSequence(alcastream.paths.label())
1269 
1270  for i in range(alcaList.count(shortName)):
1271  alcaList.remove(shortName)
1272 
1273  # DQM needs a special handling
1274  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1275  path = getattr(alcaConfig,name)
1276  self.schedule.append(path)
1277  alcaList.remove('DQM')
1278 
1279  if isinstance(alcastream,cms.Path):
1280  #black list the alca path so that they do not appear in the cfg
1281  self.blacklist_paths.append(alcastream)
1282 
1283 
1284  if len(alcaList) != 0:
1285  available=[]
1286  for name in alcaConfig.__dict__:
1287  alcastream = getattr(alcaConfig,name)
1288  if isinstance(alcastream,cms.FilteredStream):
1289  available.append(name.replace('ALCARECOStream',''))
1290  print "The following alcas could not be found "+str(alcaList)
1291  print "available ",available
1292  #print "verify your configuration, ignoring for now"
1293  raise Exception("The following alcas could not be found "+str(alcaList))
1294 
1295  def prepare_LHE(self, sequence = None):
1296  #load the fragment
1297  ##make it loadable
1298  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1299  print "Loading lhe fragment from",loadFragment
1300  __import__(loadFragment)
1301  self.process.load(loadFragment)
1302  ##inline the modules
1303  self._options.inlineObjets+=','+sequence
1304 
1305  getattr(self.process,sequence).nEvents = int(self._options.number)
1306 
1307  #schedule it
1308  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1309  self.excludedPaths.append("lhe_step")
1310  self.schedule.append( self.process.lhe_step )
1311 
1312  def prepare_GEN(self, sequence = None):
1313  """ load the fragment of generator configuration """
1314  loadFailure=False
1315  #remove trailing .py
1316  #support old style .cfi by changing into something.cfi into something_cfi
1317  #remove python/ from the name
1318  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1319  #standard location of fragments
1320  if not '/' in loadFragment:
1321  loadFragment='Configuration.Generator.'+loadFragment
1322  else:
1323  loadFragment=loadFragment.replace('/','.')
1324  try:
1325  print "Loading generator fragment from",loadFragment
1326  __import__(loadFragment)
1327  except:
1328  loadFailure=True
1329  #if self.process.source and self.process.source.type_()=='EmptySource':
1330  if not (self._options.filein or self._options.dasquery):
1331  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1332 
1333  if not loadFailure:
1334  generatorModule=sys.modules[loadFragment]
1335  genModules=generatorModule.__dict__
1336  #remove lhe producer module since this should have been
1337  #imported instead in the LHE step
1338  if self.LHEDefaultSeq in genModules:
1339  del genModules[self.LHEDefaultSeq]
1340 
1341  if self._options.hideGen:
1342  self.loadAndRemember(loadFragment)
1343  else:
1344  self.process.load(loadFragment)
1345  # expose the objects from that fragment to the configuration
1346  import FWCore.ParameterSet.Modules as cmstypes
1347  for name in genModules:
1348  theObject = getattr(generatorModule,name)
1349  if isinstance(theObject, cmstypes._Module):
1350  self._options.inlineObjets=name+','+self._options.inlineObjets
1351  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1352  self._options.inlineObjets+=','+name
1353 
1354  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1355  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1356  self.productionFilterSequence = 'ProductionFilterSequence'
1357  elif 'generator' in genModules:
1358  self.productionFilterSequence = 'generator'
1359 
1360  """ Enrich the schedule with the rest of the generation step """
1361  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1362  genSeqName=sequence.split('.')[-1]
1363 
1364  if True:
1365  try:
1366  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1367  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1368  self.loadAndRemember(cffToBeLoaded)
1369  except ImportError:
1370  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1371 
1372  if self._options.scenario == 'HeavyIons':
1373  if self._options.pileup=='HiMixGEN':
1374  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1375  else:
1376  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1377 
1378  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1379  self.schedule.append(self.process.generation_step)
1380 
1381  #register to the genstepfilter the name of the path (static right now, but might evolve)
1382  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1383 
1384  if 'reGEN' in self.stepMap:
1385  #stop here
1386  return
1387 
1388  """ Enrich the schedule with the summary of the filter step """
1389  #the gen filter in the endpath
1390  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1391  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1392  return
1393 
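 # --- Illustrative example (not part of the original source): how the generator fragment
 # name is derived from --evt_type by the replace() chain above. Both fragment names below
 # are hypothetical:
 #   'SingleMuPt10_pythia8_cfi.py'  ->  'Configuration.Generator.SingleMuPt10_pythia8_cfi'
 #   'MyPkg/python/MyFrag_cff.py'   ->  'MyPkg.MyFrag_cff'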
1394  def prepare_SIM(self, sequence = None):
1395  """ Enrich the schedule with the simulation step"""
1396  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1397  if not self._options.fast:
1398  if self._options.gflash==True:
1399  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1400 
1401  if self._options.magField=='0T':
1402  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1403  else:
1404  if self._options.magField=='0T':
1405  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1406 
1407  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1408  return
1409 
1410  def prepare_DIGI(self, sequence = None):
1411  """ Enrich the schedule with the digitisation step"""
1412  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1413 
1414  if self._options.gflash==True:
1415  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1416 
1417  if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1418  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1419 
1420  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1421  if self._options.inputEventContent=='':
1422  self._options.inputEventContent='REGEN'
1423  else:
1424  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1425 
1426 
1427  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1428  return
1429 
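 # --- Illustrative annotation (not part of the original source): with "--step DIGI:pdigi_valid"
 # the executeAndRemember call above is equivalent to adding
 #   process.mix.digitizers = cms.PSet(process.theDigitizersValid)
 # to the generated configuration, switching the MixingModule to the validation digitizer set.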
1430  def prepare_DIGIPREMIX(self, sequence = None):
1431  """ Enrich the schedule with the digitisation step"""
1432  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1433 
1434  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1435 
1436  if sequence == 'pdigi_valid':
1437  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1438  else:
1439  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1440 
1441  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1442  return
1443 
1444  def prepare_DIGIPREMIX_S2(self, sequence = None):
1445  """ Enrich the schedule with the digitisation step"""
1446  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1447 
1448  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1449 
1450 
1451  if sequence == 'pdigi_valid':
1452  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1453  else:
1454  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1455 
1456  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1457  return
1458 
1459  def prepare_CFWRITER(self, sequence = None):
1460  """ Enrich the schedule with the crossing frame writer step"""
1461  self.loadAndRemember(self.CFWRITERDefaultCFF)
1462  self.scheduleSequence('pcfw','cfwriter_step')
1463  return
1464 
1465  def prepare_DATAMIX(self, sequence = None):
1466  """ Enrich the schedule with the digitisation step"""
1467  self.loadAndRemember(self.DATAMIXDefaultCFF)
1468  self.scheduleSequence('pdatamix','datamixing_step')
1469 
1470  if self._options.pileup_input:
1471  theFiles=''
1472  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1473  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1474  elif self._options.pileup_input.startswith("filelist:"):
1475  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1476  else:
1477  theFiles=self._options.pileup_input.split(',')
1478  #print theFiles
1479  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1480 
1481  return
1482 
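 # --- Illustrative annotation (not part of the original source): --pileup_input accepts the
 # three forms handled above (dataset and file names here are hypothetical):
 #   das:/RelValMinBias/.../GEN-SIM      -> resolved via filesFromDASQuery
 #   filelist:pu_files.txt               -> read with filesFromList
 #   file:pu1.root,file:pu2.root         -> used directly as a comma-separated list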
1483  def prepare_DIGI2RAW(self, sequence = None):
1484  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1485  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1486  if "DIGIPREMIX" in self.stepMap.keys():
1487  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1488  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1489 
1490  return
1491 
1492  def prepare_REPACK(self, sequence = None):
1493  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1494  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1495  return
1496 
1497  def prepare_L1(self, sequence = None):
1498  """ Enrich the schedule with the L1 simulation step"""
1499  assert(sequence == None)
1500  self.loadAndRemember(self.L1EMDefaultCFF)
1501  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1502  return
1503 
1504  def prepare_L1REPACK(self, sequence = None):
1505  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1506  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
1507  if sequence in supported:
1508  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1509  if self._options.scenario == 'HeavyIons':
1510  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1511  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1512  else:
1513  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1514  raise Exception('unsupported feature')
1515 
1516 
1517  def prepare_HLT(self, sequence = None):
1518  """ Enrich the schedule with the HLT simulation step"""
1519  if not sequence:
1520  print "no specification of the HLT menu has been given; this should never happen"
1521  raise Exception('no HLT sequence provided')
1522 
1523  if '@' in sequence:
1524  # case where HLT:@something was provided
1525  from Configuration.HLT.autoHLT import autoHLT
1526  key = sequence[1:]
1527  if key in autoHLT:
1528  sequence = autoHLT[key]
1529  else:
1530  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1531 
1532  if ',' in sequence:
1533  #case where HLT:something:something was provided
1534  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1535  optionsForHLT = {}
1536  if self._options.scenario == 'HeavyIons':
1537  optionsForHLT['type'] = 'HIon'
1538  else:
1539  optionsForHLT['type'] = 'GRun'
1540  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1541  if sequence == 'run,fromSource':
1542  if hasattr(self.process.source,'firstRun'):
1543  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1544  elif hasattr(self.process.source,'setRunNumber'):
1545  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1546  else:
1547  raise Exception('Cannot replace menu to load %s'%(sequence))
1548  else:
1549  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1550  else:
1551  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1552 
1553  if self._options.isMC:
1554  if self._options.fast:
1555  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1556  else:
1557  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1558 
1559  if self._options.name != 'HLT':
1560  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1561  self.additionalCommands.append('process = ProcessName(process)')
1562  self.additionalCommands.append('')
1563  from HLTrigger.Configuration.CustomConfigs import ProcessName
1564  self.process = ProcessName(self.process)
1565 
1566  self.schedule.append(self.process.HLTSchedule)
1567  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1568 
1569  #this is a fake, to be removed with the fastsim migration and HLT menu dump
1570  if self._options.fast:
1571  if not hasattr(self.process,'HLTEndSequence'):
1572  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1573 
1574 
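 # --- Illustrative annotation (not part of the original source): "HLT:@someKey" (key assumed
 # to exist in Configuration.HLT.autoHLT) is resolved through the autoHLT mapping, a plain
 # menu name such as "HLT:GRun" loads HLTrigger/Configuration/HLT_GRun_cff, and the
 # "run,fromSource" form loads the menu matching the run number of the input source.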
1575  def prepare_RAW2RECO(self, sequence = None):
1576  if ',' in sequence:
1577  seqReco=sequence.split(',')[1]
1578  seqDigi=sequence.split(',')[0]
1579  else:
1580  print "RAW2RECO requires two specifications",sequence,"insufficient"
1581  raise Exception('RAW2RECO requires a comma-separated "RAW2DIGI,RECO" specification')
1582  self.prepare_RAW2DIGI(seqDigi)
1583  self.prepare_RECO(seqReco)
1584  return
1585 
1586  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1587  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1588  self.scheduleSequence(sequence,'raw2digi_step')
1589  # if self._options.isRepacked:
1590  #self.renameInputTagsInSequence(sequence)
1591  return
1592 
1593  def prepare_PATFILTER(self, sequence=None):
1594  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1595  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1596  for filt in allMetFilterPaths:
1597  self.schedule.append(getattr(self.process,'Flag_'+filt))
1598 
1599  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1600  ''' Enrich the schedule with L1 HW validation '''
1601  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1602  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1603  print '\n\n\n DEPRECATED this has no action \n\n\n'
1604  return
1605 
1606  def prepare_L1Reco(self, sequence = "L1Reco"):
1607  ''' Enrich the schedule with L1 reconstruction '''
1608  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1609  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1610  return
1611 
1612  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1613  ''' Enrich the schedule with the L1 track trigger '''
1614  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1615  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1616  return
1617 
1618  def prepare_FILTER(self, sequence = None):
1619  ''' Enrich the schedule with a user defined filter sequence '''
1620  ## load the relevant part
1621  filterConfig=self.load(sequence.split('.')[0])
1622  filterSeq=sequence.split('.')[-1]
1623  ## print it in the configuration
1624  class PrintAllModules(object):
1625  def __init__(self):
1626  self.inliner=''
1627  pass
1628  def enter(self,visitee):
1629  try:
1630  label=visitee.label()
1631  ##needs to be in reverse order
1632  self.inliner=label+','+self.inliner
1633  except:
1634  pass
1635  def leave(self,v): pass
1636 
1637  expander=PrintAllModules()
1638  getattr(self.process,filterSeq).visit( expander )
1639  self._options.inlineObjets+=','+expander.inliner
1640  self._options.inlineObjets+=','+filterSeq
1641 
1642  ## put the filtering path in the schedule
1643  self.scheduleSequence(filterSeq,'filtering_step')
1644  self.nextScheduleIsConditional=True
1645  ## put it before all the other paths
1646  self.productionFilterSequence = filterSeq
1647 
1648  return
1649 
1650  def prepare_RECO(self, sequence = "reconstruction"):
1651  ''' Enrich the schedule with reconstruction '''
1652  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1653  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1654  return
1655 
1656  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1657  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1658  if not self._options.fast:
1659  print "ERROR: this step is only implemented for FastSim"
1660  sys.exit()
1661  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1662  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1663  return
1664 
1665  def prepare_PAT(self, sequence = "miniAOD"):
1666  ''' Enrich the schedule with PAT '''
1667  self.prepare_PATFILTER(self)
1668  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1669  if not self._options.runUnscheduled:
1670  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1671  if self._options.isData:
1672  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1673  else:
1674  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1675  if self._options.fast:
1676  self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1677  return
1678 
1679  def prepare_EI(self, sequence = None):
1680  ''' Enrich the schedule with event interpretation '''
1681  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1682  if sequence in EventInterpretation:
1683  self.EIDefaultCFF = EventInterpretation[sequence]
1684  sequence = 'EIsequence'
1685  else:
1686  raise Exception('Cannot set %s event interpretation'%( sequence) )
1687  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1688  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretation_step')
1689  return
1690 
1691  def prepare_SKIM(self, sequence = "all"):
1692  ''' Enrich the schedule with skimming fragments'''
1693  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1694  sequence = sequence.split('.')[-1]
1695 
1696  skimlist=sequence.split('+')
1697  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1698  from Configuration.Skimming.autoSkim import autoSkim
1699  self.expandMapping(skimlist,autoSkim)
1700 
1701  #print "dictionary for skims:",skimConfig.__dict__
1702  for skim in skimConfig.__dict__:
1703  skimstream = getattr(skimConfig,skim)
1704  if isinstance(skimstream,cms.Path):
1705  #black list the alca path so that they do not appear in the cfg
1706  self.blacklist_paths.append(skimstream)
1707  if (not isinstance(skimstream,cms.FilteredStream)):
1708  continue
1709  shortname = skim.replace('SKIMStream','')
1710  if (sequence=="all"):
1711  self.addExtraStream(skim,skimstream)
1712  elif (shortname in skimlist):
1713  self.addExtraStream(skim,skimstream)
1714  #add a DQM eventcontent for this guy
1715  if self._options.datatier=='DQM':
1716  self.process.load(self.EVTCONTDefaultCFF)
1717  skimstreamDQM = cms.FilteredStream(
1718  responsible = skimstream.responsible,
1719  name = skimstream.name+'DQM',
1720  paths = skimstream.paths,
1721  selectEvents = skimstream.selectEvents,
1722  content = self._options.datatier+'EventContent',
1723  dataTier = cms.untracked.string(self._options.datatier)
1724  )
1725  self.addExtraStream(skim+'DQM',skimstreamDQM)
1726  for i in range(skimlist.count(shortname)):
1727  skimlist.remove(shortname)
1728 
1729 
1730 
1731  if (len(skimlist)!=0 and sequence!="all"):
1732  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1733  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1734 
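 # --- Illustrative annotation (not part of the original source): e.g. "SKIM:@SomeKey+LogError"
 # (names hypothetical) is expanded through Configuration.Skimming.autoSkim and each short
 # name is matched against the SKIMStream<Name> cms.FilteredStream objects defined in the
 # skim cff, while "SKIM:all" books every defined stream.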
1735  def prepare_USER(self, sequence = None):
1736  ''' Enrich the schedule with a user defined sequence '''
1737  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1738  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1739  return
1740 
1741  def prepare_POSTRECO(self, sequence = None):
1742  """ Enrich the schedule with the postreco step """
1743  self.loadAndRemember(self.POSTRECODefaultCFF)
1744  self.scheduleSequence('postreco_generator','postreco_step')
1745  return
1746 
1747 
1748  def prepare_VALIDATION(self, sequence = 'validation'):
1749  print sequence,"in preparing validation"
1750  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1751  from Validation.Configuration.autoValidation import autoValidation
1752  #in case VALIDATION:something:somethingelse -> something,somethingelse
1753  sequence=sequence.split('.')[-1]
1754  if sequence.find(',')!=-1:
1755  prevalSeqName=sequence.split(',')[0].split('+')
1756  valSeqName=sequence.split(',')[1].split('+')
1757  self.expandMapping(prevalSeqName,autoValidation,index=0)
1758  self.expandMapping(valSeqName,autoValidation,index=1)
1759  else:
1760  if '@' in sequence:
1761  prevalSeqName=sequence.split('+')
1762  valSeqName=sequence.split('+')
1763  self.expandMapping(prevalSeqName,autoValidation,index=0)
1764  self.expandMapping(valSeqName,autoValidation,index=1)
1765  else:
1766  postfix=''
1767  if sequence:
1768  postfix='_'+sequence
1769  prevalSeqName=['prevalidation'+postfix]
1770  valSeqName=['validation'+postfix]
1771  if not hasattr(self.process,valSeqName[0]):
1772  prevalSeqName=['']
1773  valSeqName=[sequence]
1774 
1775  def NFI(index):
1776  ##name from index, required to keep backward compatibility
1777  if index==0:
1778  return ''
1779  else:
1780  return '%s'%index
1781 
1782 
1783  #rename the HLT process in validation steps
1784  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1785  for s in valSeqName+prevalSeqName:
1786  if s:
1787  self.renameHLTprocessInSequence(s)
1788  for (i,s) in enumerate(prevalSeqName):
1789  if s:
1790  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1791  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1792 
1793  for (i,s) in enumerate(valSeqName):
1794  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1795  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1796 
1797  #needed in case the miniAODValidation sequence is run starting from AODSIM
1798  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1799  return
1800 
1801  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1802  if self._options.restoreRNDSeeds==False:
1803  self._options.restoreRNDSeeds=True
1804 
1805  if not 'DIGI' in self.stepMap and not self._options.fast:
1806  self.executeAndRemember("process.mix.playback = True")
1807  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1808  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1809  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1810 
1811  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1812  #will get in the schedule, smoothly
1813  for (i,s) in enumerate(valSeqName):
1814  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1815 
1816  return
1817 
1818 
1819  class MassSearchReplaceProcessNameVisitor(object):
1820  """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value.
1821  It will climb down within PSets, VPSets and VInputTags to find its target"""
1822  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1823  self._paramReplace = paramReplace
1824  self._paramSearch = paramSearch
1825  self._verbose = verbose
1826  self._whitelist = whitelist
1827 
1828  def doIt(self,pset,base):
1829  if isinstance(pset, cms._Parameterizable):
1830  for name in pset.parameters_().keys():
1831  # skip whitelisted parameters
1832  if name in self._whitelist:
1833  continue
1834  # if I use pset.parameters_().items() I get copies of the parameter values
1835  # so I can't modify the nested pset
1836  value = getattr(pset,name)
1837  type = value.pythonTypeName()
1838  if type in ('cms.PSet', 'cms.untracked.PSet'):
1839  self.doIt(value,base+"."+name)
1840  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1841  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1842  elif type in ('cms.string', 'cms.untracked.string'):
1843  if value.value() == self._paramSearch:
1844  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1845  setattr(pset, name,self._paramReplace)
1846  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1847  for (i,n) in enumerate(value):
1848  if not isinstance(n, cms.InputTag):
1849  n=cms.InputTag(n)
1850  if n.processName == self._paramSearch:
1851  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1852  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1853  setattr(n,"processName",self._paramReplace)
1854  value[i]=n
1855  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1856  for (i,n) in enumerate(value):
1857  if n==self._paramSearch:
1858  getattr(pset,name)[i]=self._paramReplace
1859  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1860  if value.processName == self._paramSearch:
1861  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1862  setattr(getattr(pset, name),"processName",self._paramReplace)
1863 
1864  def enter(self,visitee):
1865  label = ''
1866  try:
1867  label = visitee.label()
1868  except AttributeError:
1869  label = '<Module not in a Process>'
1870  except:
1871  label = 'other exception'
1872  self.doIt(visitee, label)
1873 
1874  def leave(self,visitee):
1875  pass
1876 
1877  #visit a sequence to replace all input tags
1878  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1879  print "Replacing all InputTag %s => %s"%(oldT,newT)
1880  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1881  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1882  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1883  if not loadMe in self.additionalCommands:
1884  self.additionalCommands.append(loadMe)
1885  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1886 
1887  #change the process name used to address HLT results in any sequence
1888  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1889  if self._options.hltProcess:
1890  proc=self._options.hltProcess
1891  else:
1892  proc=self.process.name_()
1893  if proc==HLTprocess: return
1894  # look up all modules in the given sequence and replace the HLT process name
1895  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1896  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1897  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1898  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1899  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1900 
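 # --- Illustrative example (not part of the original source): with --hltProcess HLT2 and a
 # sequence named DQMOffline (both hypothetical), the call above adds to the dumped
 # configuration a line equivalent to
 #   process.DQMOffline.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "HLT2", whitelist = ("subSystemFolder",)))
 # so that re-running the dumped config reproduces the same relabelling.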
1901 
1902  def expandMapping(self,seqList,mapping,index=None):
1903  maxLevel=20
1904  level=0
1905  while '@' in repr(seqList) and level<maxLevel:
1906  level+=1
1907  for specifiedCommand in seqList:
1908  if specifiedCommand.startswith('@'):
1909  location=specifiedCommand[1:]
1910  if not location in mapping:
1911  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1912  mappedTo=mapping[location]
1913  if index!=None:
1914  mappedTo=mappedTo[index]
1915  seqList.remove(specifiedCommand)
1916  seqList.extend(mappedTo.split('+'))
1917  break
1918  if level==maxLevel:
1919  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1920 
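 # --- Illustrative example (not part of the original source): how expandMapping resolves
 # '@' entries in place; the mapping content below is made up for the example.
 #   mapping = {'muon': 'DQMOfflineMuon+DQMOfflineCommon'}
 #   seqList = ['@muon', 'myOwnSequence']
 #   self.expandMapping(seqList, mapping)
 #   # seqList is now ['myOwnSequence', 'DQMOfflineMuon', 'DQMOfflineCommon']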
1921  def prepare_DQM(self, sequence = 'DQMOffline'):
1922  # this one needs replacement
1923 
1924  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1925  sequenceList=sequence.split('.')[-1].split('+')
1926  postSequenceList=sequence.split('.')[-1].split('+')
1927  from DQMOffline.Configuration.autoDQM import autoDQM
1928  self.expandMapping(sequenceList,autoDQM,index=0)
1929  self.expandMapping(postSequenceList,autoDQM,index=1)
1930 
1931  if len(set(sequenceList))!=len(sequenceList):
1932  sequenceList=list(set(sequenceList))
1933  print "Duplicate entries for DQM, using",sequenceList
1934 
1935  pathName='dqmoffline_step'
1936  for (i,sequence) in enumerate(sequenceList):
1937  if (i!=0):
1938  pathName='dqmoffline_%d_step'%(i)
1939 
1940  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1941  self.renameHLTprocessInSequence(sequence)
1942 
1943  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1944  # not for fastsim
1945  if 'HLT' in self.stepMap.keys() and not self._options.fast:
1946  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1947  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1948  else:
1949  # schedule DQM as a standard Path
1950  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1951  self.schedule.append(getattr(self.process,pathName))
1952 
1953  pathName='dqmofflineOnPAT_step'
1954  for (i,sequence) in enumerate(postSequenceList):
1955  if (i!=0):
1956  pathName='dqmofflineOnPAT_%d_step'%(i)
1957 
1958  # if both MINIAOD and DQM are run in the same process, schedule DQM in an EndPath
1959  if 'PAT' in self.stepMap.keys():
1960  # need to put DQM in an EndPath, to access the miniAOD filter results
1961  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1962  else:
1963  # schedule DQM as a standard Path
1964  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1965  self.schedule.append(getattr(self.process,pathName))
1966 
1967  def prepare_HARVESTING(self, sequence = None):
1968  """ Enrich the process with harvesting step """
1969  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1970  self.loadAndRemember(self.DQMSaverCFF)
1971 
1972  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1973  sequence = sequence.split('.')[-1]
1974 
1975  # decide which HARVESTING paths to use
1976  harvestingList = sequence.split("+")
1977  from DQMOffline.Configuration.autoDQM import autoDQM
1978  from Validation.Configuration.autoValidation import autoValidation
1979  import copy
1980  combined_mapping = copy.deepcopy( autoDQM )
1981  combined_mapping.update( autoValidation )
1982  self.expandMapping(harvestingList,combined_mapping,index=-1)
1983 
1984  if len(set(harvestingList))!=len(harvestingList):
1985  harvestingList=list(set(harvestingList))
1986  print "Duplicate entries for HARVESTING, using",harvestingList
1987 
1988  for name in harvestingList:
1989  if not name in harvestingConfig.__dict__:
1990  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1991  continue
1992  harvestingstream = getattr(harvestingConfig,name)
1993  if isinstance(harvestingstream,cms.Path):
1994  self.schedule.append(harvestingstream)
1995  self.blacklist_paths.append(harvestingstream)
1996  if isinstance(harvestingstream,cms.Sequence):
1997  setattr(self.process,name+"_step",cms.Path(harvestingstream))
1998  self.schedule.append(getattr(self.process,name+"_step"))
1999 
2000  self.scheduleSequence('DQMSaver','dqmsave_step')
2001  return
2002 
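 # --- Illustrative annotation (not part of the original source): e.g. "HARVESTING:@common+dqmHarvesting"
 # (entries hypothetical) is expanded with the combined autoDQM/autoValidation mapping; index=-1
 # picks, roughly, the last (harvesting) member of each mapping entry, and every resulting name
 # must be a cms.Path or cms.Sequence defined in the harvesting cff loaded above.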
2003  def prepare_ALCAHARVEST(self, sequence = None):
2004  """ Enrich the process with AlCaHarvesting step """
2005  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2006  sequence=sequence.split(".")[-1]
2007 
2008  # decide which AlcaHARVESTING paths to use
2009  harvestingList = sequence.split("+")
2010 
2011 
2012 
2013  from Configuration.AlCa.autoPCL import autoPCL
2014  self.expandMapping(harvestingList,autoPCL)
2015 
2016  for name in harvestingConfig.__dict__:
2017  harvestingstream = getattr(harvestingConfig,name)
2018  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2019  self.schedule.append(harvestingstream)
2020  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2021  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2022  harvestingList.remove(name)
2023  # append the common part at the end of the sequence
2024  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2025  self.schedule.append(lastStep)
2026 
2027  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2028  print "The following harvesting sequences could not be found : ", harvestingList
2029  raise Exception("The following harvesting sequences could not be found : "+str(harvestingList))
2030 
2031 
2032 
2033  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2034  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2035  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2036  return
2037 
2038  def finalizeFastSimHLT(self):
2039  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2040  self.schedule.append(self.process.reconstruction)
2041 
2042 
2043  def build_production_info(self, evt_type, evtnumber):
2044  """ Add useful info for the production. """
2045  self.process.configurationMetadata=cms.untracked.PSet\
2046  (version=cms.untracked.string("$Revision: 1.19 $"),
2047  name=cms.untracked.string("Applications"),
2048  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2049  )
2050 
2051  self.addedObjects.append(("Production Info","configurationMetadata"))
2052 
2053 
2054  def prepare(self, doChecking = False):
2055  """ Prepare the configuration string and add missing pieces."""
2056 
2057  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2058  self.addMaxEvents()
2059  if self.with_input:
2060  self.addSource()
2061  self.addStandardSequences()
2062  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2063  self.completeInputCommand()
2064  self.addConditions()
2065 
2066 
2067  outputModuleCfgCode=""
2068  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2069  outputModuleCfgCode=self.addOutput()
2070 
2071  self.addCommon()
2072 
2073  self.pythonCfgCode = "# Auto generated configuration file\n"
2074  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2075  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2076  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2077  if hasattr(self._options,"era") and self._options.era :
2078  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2079  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2080  # Multiple eras can be specified in a comma separated list
2081  for requestedEra in self._options.era.split(",") :
2082  self.pythonCfgCode += ",eras."+requestedEra
2083  self.pythonCfgCode += ")\n\n" # end of the line
2084  else :
2085  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2086 
2087  self.pythonCfgCode += "# import of standard configurations\n"
2088  for module in self.imports:
2089  self.pythonCfgCode += ("process.load('"+module+"')\n")
2090 
2091  # production info
2092  if not hasattr(self.process,"configurationMetadata"):
2093  self.build_production_info(self._options.evt_type, self._options.number)
2094  else:
2095  #the PSet was added via a load
2096  self.addedObjects.append(("Production Info","configurationMetadata"))
2097 
2098  self.pythonCfgCode +="\n"
2099  for comment,object in self.addedObjects:
2100  if comment!="":
2101  self.pythonCfgCode += "\n# "+comment+"\n"
2102  self.pythonCfgCode += dumpPython(self.process,object)
2103 
2104  # dump the output definition
2105  self.pythonCfgCode += "\n# Output definition\n"
2106  self.pythonCfgCode += outputModuleCfgCode
2107 
2108  # dump all additional outputs (e.g. alca or skim streams)
2109  self.pythonCfgCode += "\n# Additional output definition\n"
2110  #sort the keys to get a deterministic ordering of the additional outputs
2111  nl=self.additionalOutputs.keys()
2112  nl.sort()
2113  for name in nl:
2114  output = self.additionalOutputs[name]
2115  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2116  tmpOut = cms.EndPath(output)
2117  setattr(self.process,name+'OutPath',tmpOut)
2118  self.schedule.append(tmpOut)
2119 
2120  # dump all additional commands
2121  self.pythonCfgCode += "\n# Other statements\n"
2122  for command in self.additionalCommands:
2123  self.pythonCfgCode += command + "\n"
2124 
2125  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2126  for object in self._options.inlineObjets.split(','):
2127  if not object:
2128  continue
2129  if not hasattr(self.process,object):
2130  print 'cannot inline -'+object+'- : not known'
2131  else:
2132  self.pythonCfgCode +='\n'
2133  self.pythonCfgCode +=dumpPython(self.process,object)
2134 
2135  # dump all paths
2136  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2137  for path in self.process.paths:
2138  if getattr(self.process,path) not in self.blacklist_paths:
2139  self.pythonCfgCode += dumpPython(self.process,path)
2140 
2141  for endpath in self.process.endpaths:
2142  if getattr(self.process,endpath) not in self.blacklist_paths:
2143  self.pythonCfgCode += dumpPython(self.process,endpath)
2144 
2145  # dump the schedule
2146  self.pythonCfgCode += "\n# Schedule definition\n"
2147  result = "process.schedule = cms.Schedule("
2148 
2149  # handling of the schedule
2150  self.process.schedule = cms.Schedule()
2151  for item in self.schedule:
2152  if not isinstance(item, cms.Schedule):
2153  self.process.schedule.append(item)
2154  else:
2155  self.process.schedule.extend(item)
2156 
2157  if hasattr(self.process,"HLTSchedule"):
2158  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2159  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2160  pathNames = ['process.'+p.label_() for p in beforeHLT]
2161  result += ','.join(pathNames)+')\n'
2162  result += 'process.schedule.extend(process.HLTSchedule)\n'
2163  pathNames = ['process.'+p.label_() for p in afterHLT]
2164  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2165  else:
2166  pathNames = ['process.'+p.label_() for p in self.schedule]
2167  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2168 
2169  self.pythonCfgCode += result
2170 
2171  if self._options.nThreads != "1":
2172  self.pythonCfgCode +="\n"
2173  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2174  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2175  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2176  #repacked version
2177  if self._options.isRepacked:
2178  self.pythonCfgCode +="\n"
2179  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2180  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2181  MassReplaceInputTag(self.process)
2182 
2183  # special treatment in case of production filter sequence 2/2
2184  if self.productionFilterSequence:
2185  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2186  self.pythonCfgCode +='for path in process.paths:\n'
2187  if len(self.conditionalPaths):
2188  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2189  if len(self.excludedPaths):
2190  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2191  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2192  pfs = getattr(self.process,self.productionFilterSequence)
2193  for path in self.process.paths:
2194  if not path in self.conditionalPaths: continue
2195  if path in self.excludedPaths: continue
2196  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2197 
2198 
2199  # dump customise fragment
2200  self.pythonCfgCode += self.addCustomise()
2201 
2202  if self._options.runUnscheduled:
2203  # prune and delete paths
2204  #this is not supporting the blacklist at this point since I do not understand it
2205  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2206  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2207  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2208 
2209  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2210  self.process=convertToUnscheduled(self.process)
2211 
2212  #now add the unscheduled stuff
2213  for module in self.importsUnsch:
2214  self.process.load(module)
2215  self.pythonCfgCode += ("process.load('"+module+"')\n")
2216 
2217  #and clean the unscheduled stuff
2218  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2219  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2220 
2221  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2222  self.process=cleanUnscheduled(self.process)
2223 
2224 
2225  self.pythonCfgCode += self.addCustomise(1)
2226 
2227 
2228  # make the .io file
2229 
2230  if self._options.io:
2231  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2232  if not self._options.io.endswith('.io'): self._options.io+='.io'
2233  io=open(self._options.io,'w')
2234  ioJson={}
2235  if hasattr(self.process.source,"fileNames"):
2236  if len(self.process.source.fileNames.value()):
2237  ioJson['primary']=self.process.source.fileNames.value()
2238  if hasattr(self.process.source,"secondaryFileNames"):
2239  if len(self.process.source.secondaryFileNames.value()):
2240  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2241  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2242  ioJson['pileup']=self._options.pileup_input[4:]
2243  for (o,om) in self.process.outputModules_().items():
2244  ioJson[o]=om.fileName.value()
2245  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2246  if self.productionFilterSequence:
2247  ioJson['filter']=self.productionFilterSequence
2248  import json
2249  io.write(json.dumps(ioJson))
2250  return
2251 
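 # --- Illustrative example (not part of the original source): the .io file written above is
 # plain JSON built from the ioJson dictionary; with hypothetical inputs it would look like
 #   {"primary": ["/store/data/Run2015B/.../file.root"],
 #    "GT": "auto:run2_data",
 #    "RECOoutput": "output.root"}
 # where the file path, global tag and the "RECOoutput" output-module label are made up.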