ConfigBuilder.py
1 #! /usr/bin/env python
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 import sys
9 import re
10 import collections
11 from subprocess import Popen,PIPE
12 import FWCore.ParameterSet.DictTypes as DictTypes
13 class Options:
14  pass
15 
16 # the canonical defaults
17 defaultOptions = Options()
18 defaultOptions.datamix = 'DataOnSim'
19 defaultOptions.isMC=False
20 defaultOptions.isData=True
21 defaultOptions.step=''
22 defaultOptions.pileup='NoPileUp'
23 defaultOptions.pileup_input = None
24 defaultOptions.pileup_dasoption = ''
25 defaultOptions.geometry = 'SimDB'
26 defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
27 defaultOptions.magField = ''
28 defaultOptions.conditions = None
29 defaultOptions.useCondDBv1 = False
30 defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
31 defaultOptions.harvesting= 'AtRunEnd'
32 defaultOptions.gflash = False
33 defaultOptions.number = -1
34 defaultOptions.number_out = None
35 defaultOptions.arguments = ""
36 defaultOptions.name = "NO NAME GIVEN"
37 defaultOptions.evt_type = ""
38 defaultOptions.filein = ""
39 defaultOptions.dasquery=""
40 defaultOptions.dasoption=""
41 defaultOptions.secondfilein = ""
42 defaultOptions.customisation_file = []
43 defaultOptions.customisation_file_unsch = []
44 defaultOptions.customise_commands = ""
45 defaultOptions.inline_custom=False
46 defaultOptions.particleTable = 'pythiapdt'
47 defaultOptions.particleTableList = ['pythiapdt','pdt']
48 defaultOptions.dirin = ''
49 defaultOptions.dirout = ''
50 defaultOptions.filetype = 'EDM'
51 defaultOptions.fileout = 'output.root'
52 defaultOptions.filtername = ''
53 defaultOptions.lazy_download = False
54 defaultOptions.custom_conditions = ''
55 defaultOptions.hltProcess = ''
56 defaultOptions.eventcontent = None
57 defaultOptions.datatier = None
58 defaultOptions.inlineEventContent = True
59 defaultOptions.inlineObjets =''
60 defaultOptions.hideGen=False
61 from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
62 defaultOptions.beamspot=None
63 defaultOptions.outputDefinition =''
64 defaultOptions.inputCommands = None
65 defaultOptions.outputCommands = None
66 defaultOptions.inputEventContent = ''
67 defaultOptions.dropDescendant = False
68 defaultOptions.relval = None
69 defaultOptions.slhc = None
70 defaultOptions.profile = None
71 defaultOptions.isRepacked = False
72 defaultOptions.restoreRNDSeeds = False
73 defaultOptions.donotDropOnInput = ''
74 defaultOptions.python_filename =''
75 defaultOptions.io=None
76 defaultOptions.lumiToProcess=None
77 defaultOptions.fast=False
78 defaultOptions.runsAndWeightsForMC = None
79 defaultOptions.runsScenarioForMC = None
80 defaultOptions.runUnscheduled = False
81 defaultOptions.timeoutOutput = False
82 defaultOptions.nThreads = '1'
83 
84 # some helper routines
85 def dumpPython(process,name):
86  theObject = getattr(process,name)
87  if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
88  return "process."+name+" = " + theObject.dumpPython("process")
89  elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
90  return "process."+name+" = " + theObject.dumpPython()+"\n"
91  else:
92  return "process."+name+" = " + theObject.dumpPython()+"\n"
93 def filesFromList(fileName,s=None):
94  import os
95  import FWCore.ParameterSet.Config as cms
96  prim=[]
97  sec=[]
98  for line in open(fileName,'r'):
99  if line.count(".root")>=2:
100  #two files solution...
101  entries=line.replace("\n","").split()
102  if not entries[0] in prim:
103  prim.append(entries[0])
104  if not entries[1] in sec:
105  sec.append(entries[1])
106  elif (line.find(".root")!=-1):
107  entry=line.replace("\n","")
108  if not entry in prim:
109  prim.append(entry)
110  if s:
111  if not hasattr(s,"fileNames"):
112  s.fileNames=cms.untracked.vstring(prim)
113  else:
114  s.fileNames.extend(prim)
115  if len(sec)!=0:
116  if not hasattr(s,"secondaryFileNames"):
117  s.secondaryFileNames=cms.untracked.vstring(sec)
118  else:
119  s.secondaryFileNames.extend(sec)
120  print "found files: ",prim
121  if len(prim)==0:
122  raise Exception("There are no files in input from the file list")
123  if len(sec)!=0:
124  print "found parent files:",sec
125  return (prim,sec)
126 
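# Usage sketch for filesFromList (illustrative only; "my_input_files.txt" is a
# hypothetical file name). The list file holds one primary .root file per line,
# or "primary.root secondary.root" pairs when parent files are needed:
#
#   source = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
#   prim, sec = filesFromList("my_input_files.txt", source)
#   # prim/sec are the de-duplicated primary/secondary lists; source.fileNames
#   # (and source.secondaryFileNames, if any) have been filled in place.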
127 def filesFromDASQuery(query,option="",s=None):
128  import os,time
129  import FWCore.ParameterSet.Config as cms
130  prim=[]
131  sec=[]
132  print "the query is",query
133  eC=5
134  count=0
135  while eC!=0 and count<3:
136  if count!=0:
137  print 'Sleeping, then retrying DAS'
138  time.sleep(100)
139  p = Popen('das_client.py %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
140  pipe=p.stdout.read()
141  tupleP = os.waitpid(p.pid, 0)
142  eC=tupleP[1]
143  count=count+1
144  if eC==0:
145  print "DAS succeeded after",count,"attempts",eC
146  else:
147  print "DAS failed 3 times - I give up"
148  for line in pipe.split('\n'):
149  if line.count(".root")>=2:
150  #two files solution...
151  entries=line.replace("\n","").split()
152  if not entries[0] in prim:
153  prim.append(entries[0])
154  if not entries[1] in sec:
155  sec.append(entries[1])
156  elif (line.find(".root")!=-1):
157  entry=line.replace("\n","")
158  if not entry in prim:
159  prim.append(entry)
160  if s:
161  if not hasattr(s,"fileNames"):
162  s.fileNames=cms.untracked.vstring(prim)
163  else:
164  s.fileNames.extend(prim)
165  if len(sec)!=0:
166  if not hasattr(s,"secondaryFileNames"):
167  s.secondaryFileNames=cms.untracked.vstring(sec)
168  else:
169  s.secondaryFileNames.extend(sec)
170  print "found files: ",prim
171  if len(sec)!=0:
172  print "found parent files:",sec
173  return (prim,sec)
174 
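# Usage sketch for filesFromDASQuery (illustrative; the dataset name is a
# hypothetical placeholder). The query string is handed verbatim to
# das_client.py and retried up to three times on failure; an optional source
# object gets its fileNames filled, as in filesFromList:
#
#   prim, sec = filesFromDASQuery('file dataset = /SomeDataset/SomeEra/AODSIM')
#
# This is what --filein das:/SomeDataset/SomeEra/AODSIM resolves to in addSource.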
175 def MassReplaceInputTag(aProcess,oldT="rawDataCollector",newT="rawDataRepacker"):
176  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
177  for s in aProcess.paths_().keys():
178  massSearchReplaceAnyInputTag(getattr(aProcess,s),oldT,newT)
179 
180 def anyOf(listOfKeys,dict,opt=None):
181  for k in listOfKeys:
182  if k in dict:
183  toReturn=dict[k]
184  dict.pop(k)
185  return toReturn
186  if opt!=None:
187  return opt
188  else:
189  raise Exception("one of "+','.join(listOfKeys)+" is a mandatory entry of the --output option")
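# anyOf usage sketch (hypothetical dict): the value of the first matching key
# is returned and popped, so leftover keys can later be flagged as unused:
#
#   outDefDict = {'tier': 'AODSIM', 'e': 'AODSIM'}
#   theTier = anyOf(['t', 'tier', 'dataTier'], outDefDict)            # 'AODSIM'
#   theStreamType = anyOf(['e', 'ec', 'eventContent', 'streamType'],
#                         outDefDict, theTier)                        # 'AODSIM'
#   # outDefDict is now empty; addOutput complains about any key left over.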
190 
191 class ConfigBuilder(object):
192  """The main building routines """
193 
194  def __init__(self, options, process = None, with_output = False, with_input = False ):
195  """options taken from old cmsDriver and optparse """
196 
197  options.outfile_name = options.dirout+options.fileout
198 
199  self._options = options
200 
201  if self._options.isData and options.isMC:
202  raise Exception("ERROR: You may specify only --data or --mc, not both")
203  #if not self._options.conditions:
204  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
205 
206  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
207  if 'ENDJOB' in self._options.step:
208  if (hasattr(self._options,"outputDefinition") and \
209  self._options.outputDefinition != '' and \
210  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
211  (hasattr(self._options,"datatier") and \
212  self._options.datatier and \
213  'DQMIO' in self._options.datatier):
214  print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
215  self._options.step=self._options.step.replace(',ENDJOB','')
216 
217 
218 
219  # what steps are provided by this class?
220  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
221  self.stepMap={}
222  self.stepKeys=[]
223  for step in self._options.step.split(","):
224  if step=='': continue
225  stepParts = step.split(":")
226  stepName = stepParts[0]
227  if stepName not in stepList and not stepName.startswith('re'):
228  raise ValueError("Step "+stepName+" unknown")
229  if len(stepParts)==1:
230  self.stepMap[stepName]=""
231  elif len(stepParts)==2:
232  self.stepMap[stepName]=stepParts[1].split('+')
233  elif len(stepParts)==3:
234  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
235  else:
236  raise ValueError("Step definition "+step+" invalid")
237  self.stepKeys.append(stepName)
238 
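# Step-parsing sketch (hypothetical command line): a value such as
#   --step GEN,SIM,RECO:localreco+globalreco,HARVESTING:dqmHarvesting
# yields
#   self.stepMap == {'GEN': '', 'SIM': '',
#                    'RECO': ['localreco', 'globalreco'],
#                    'HARVESTING': ['dqmHarvesting']}
# i.e. "" for a bare step, a list for STEP:a+b, and a (list, cff) tuple for the
# three-part form STEP:cff:a+b; step names must match a prepare_* method.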
239  #print "map of steps is:",self.stepMap
240 
241  self.with_output = with_output
242  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
243  self.with_output = False
244  self.with_input = with_input
245  if process == None:
246  self.process = cms.Process(self._options.name)
247  else:
248  self.process = process
249  self.imports = []
250  self.importsUnsch = []
251  self.define_Configs()
252  self.schedule = list()
253 
254  # we are doing three things here:
255  # creating a process to catch errors
256  # building the code to re-create the process
257 
258  self.additionalCommands = []
259  # TODO: maybe a list of to be dumped objects would help as well
260  self.blacklist_paths = []
261  self.addedObjects = []
262  self.additionalOutputs = {}
263 
264  self.productionFilterSequence = None
265  self.nextScheduleIsConditional=False
266  self.conditionalPaths=[]
267  self.excludedPaths=[]
268 
269  def profileOptions(self):
270  """
271  addIgProfService
272  Function to add the igprof profile service so that you can dump in the middle
273  of the run.
274  """
275  profileOpts = self._options.profile.split(':')
276  profilerStart = 1
277  profilerInterval = 100
278  profilerFormat = None
279  profilerJobFormat = None
280 
281  if len(profileOpts):
282  #type, given as first argument is unused here
283  profileOpts.pop(0)
284  if len(profileOpts):
285  startEvent = profileOpts.pop(0)
286  if not startEvent.isdigit():
287  raise Exception("%s is not a number" % startEvent)
288  profilerStart = int(startEvent)
289  if len(profileOpts):
290  eventInterval = profileOpts.pop(0)
291  if not eventInterval.isdigit():
292  raise Exception("%s is not a number" % eventInterval)
293  profilerInterval = int(eventInterval)
294  if len(profileOpts):
295  profilerFormat = profileOpts.pop(0)
296 
297 
298  if not profilerFormat:
299  profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""),
300  self._options.step,
301  self._options.pileup,
302  self._options.conditions,
303  self._options.datatier,
304  self._options.profileTypeLabel)
305  if not profilerJobFormat and profilerFormat.endswith(".gz"):
306  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
307  elif not profilerJobFormat:
308  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
309 
310  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
311 
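# profileOptions sketch (hypothetical --profile value): with
#   self._options.profile = "pp:101:500"
# the leading type token is discarded and the method returns roughly
#   (101, 500, "<evt_type>___<step>___..._%I.gz", "..._%I_EndOfJob.gz")
# i.e. (first event, event interval, per-event dump pattern, end-of-job dump
# pattern), which addCommon feeds to the IgProfService configuration.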
312  def load(self,includeFile):
313  includeFile = includeFile.replace('/','.')
314  self.process.load(includeFile)
315  return sys.modules[includeFile]
316 
317  def loadAndRemember(self, includeFile,unsch=0):
318  """helper routine to load and memorize imports"""
319  # we could make the imports an on-the-fly data method of the process instance itself
320  # not sure if the latter is a good idea
321  includeFile = includeFile.replace('/','.')
322  if unsch==0:
323  self.imports.append(includeFile)
324  self.process.load(includeFile)
325  return sys.modules[includeFile]
326  else:
327  self.importsUnsch.append(includeFile)
328  return 0#sys.modules[includeFile]
329 
330  def executeAndRemember(self, command):
331  """helper routine to remember replace statements"""
332  self.additionalCommands.append(command)
333  if not command.strip().startswith("#"):
334  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
335  import re
336  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
337  #exec(command.replace("process.","self.process."))
338 
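# executeAndRemember sketch: the command string is recorded for the dumped
# configuration and, unless it is a comment, also executed on self.process
# after the bare "process" prefix is rewritten to "self.process", e.g.
# (hypothetical parameter)
#
#   self.executeAndRemember('process.mix.bunchspace = cms.int32(25)')
#
# takes effect immediately here and is replayed verbatim in the final cfg.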
339  def addCommon(self):
340  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
341  self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
342  else:
343  self.process.options = cms.untracked.PSet( )
344 
345  if self._options.runUnscheduled:
346  self.process.options.allowUnscheduled=cms.untracked.bool(True)
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many events will be processed"""
373  self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
374  if self._options.number_out:
375  self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
376  self.addedObjects.append(("","maxEvents"))
377 
378  def addSource(self):
379  """Here the source is built. Priority: file, generator"""
380  self.addedObjects.append(("Input source","source"))
381 
382  def filesFromOption(self):
383  for entry in self._options.filein.split(','):
384  print "entry",entry
385  if entry.startswith("filelist:"):
386  filesFromList(entry[9:],self.process.source)
387  elif entry.startswith("dbs:") or entry.startswith("das:"):
388  filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
389  else:
390  self.process.source.fileNames.append(self._options.dirin+entry)
391  if self._options.secondfilein:
392  if not hasattr(self.process.source,"secondaryFileNames"):
393  raise Exception("--secondfilein not compatible with "+self._options.filetype+" input type")
394  for entry in self._options.secondfilein.split(','):
395  print "entry",entry
396  if entry.startswith("filelist:"):
397  self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
398  elif entry.startswith("dbs:") or entry.startswith("das:"):
399  self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
400  else:
401  self.process.source.secondaryFileNames.append(self._options.dirin+entry)
402 
403  if self._options.filein or self._options.dasquery:
404  if self._options.filetype == "EDM":
405  self.process.source=cms.Source("PoolSource",
406  fileNames = cms.untracked.vstring(),
407  secondaryFileNames= cms.untracked.vstring())
408  filesFromOption(self)
409  elif self._options.filetype == "DAT":
410  self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411  filesFromOption(self)
412  elif self._options.filetype == "LHE":
413  self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
414  if self._options.filein.startswith("lhe:"):
415  #list the article directory automatically
416  args=self._options.filein.split(':')
417  article=args[1]
418  print 'LHE input from article ',article
419  location='/store/lhe/'
420  import os
421  textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
422  for line in textOfFiles:
423  for fileName in [x for x in line.split() if '.lhe' in x]:
424  self.process.source.fileNames.append(location+article+'/'+fileName)
425  if len(args)>2:
426  self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
427  else:
428  filesFromOption(self)
429 
430 
431  elif self._options.filetype == "DQM":
432  self.process.source=cms.Source("DQMRootSource",
433  fileNames = cms.untracked.vstring())
434  filesFromOption(self)
435 
436  elif self._options.filetype == "DQMDAQ":
437  # FIXME: how to configure it if there are no input files specified?
438  self.process.source=cms.Source("DQMStreamerReader")
439 
440 
441  if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
442  self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
443 
444  if self._options.dasquery!='':
445  self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
446  filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
447 
448  ##drop LHEXMLStringProduct on input to save memory if appropriate
449  if 'GEN' in self.stepMap.keys():
450  if self._options.inputCommands:
451  self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
452  else:
453  self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'
454 
455  if self.process.source and self._options.inputCommands:
456  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
457  for command in self._options.inputCommands.split(','):
458  # remove whitespace around the keep/drop statements
459  command = command.strip()
460  if command=='': continue
461  self.process.source.inputCommands.append(command)
462  if not self._options.dropDescendant:
463  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
464 
465  if self._options.lumiToProcess:
466  import FWCore.PythonUtilities.LumiList as LumiList
467  self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )
468 
469  if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
470  if self.process.source is None:
471  self.process.source=cms.Source("EmptySource")
472 
473  # modify source in case of run-dependent MC
474  self.runsAndWeights=None
475  if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
476  if not self._options.isMC :
477  raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
478  if self._options.runsAndWeightsForMC:
479  self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
480  else:
481  from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
482  if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
483  __import__(RunsAndWeights[self._options.runsScenarioForMC])
484  self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
485  else:
486  self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
487 
488  if self.runsAndWeights:
489  import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
490  ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
491  self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
492  self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
493 
494  return
495 
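# addSource sketch of the --filein forms handled above (values hypothetical):
#   --filein myfile.root                        -> appended to PoolSource.fileNames
#   --filein filelist:files.txt                 -> expanded via filesFromList
#   --filein das:/SomeDataset/SomeEra/AODSIM    -> expanded via filesFromDASQuery
#   --filetype LHE --filein lhe:1234            -> /store/lhe/1234 listed with
#                                                  cmsLHEtoEOSManager.py
# plus --dasquery for a raw DAS query, and an EmptySource when a GEN/LHE job
# has no input files at all.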
496  def addOutput(self):
497  """ Add output module to the process """
498  result=""
499  if self._options.outputDefinition:
500  if self._options.datatier:
501  print "--datatier & --eventcontent options ignored"
502 
503  #new output convention with a list of dict
504  outList = eval(self._options.outputDefinition)
505  for (id,outDefDict) in enumerate(outList):
506  outDefDictStr=outDefDict.__str__()
507  if not isinstance(outDefDict,dict):
508  raise Exception("--output needs to be passed a list of dicts; "+self._options.outputDefinition+" is invalid")
509  #requires option: tier
510  theTier=anyOf(['t','tier','dataTier'],outDefDict)
511  #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
512  ## event content
513  theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
514  theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
515  theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
516  theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
517  theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
518  # module label has a particular role
519  if not theModuleLabel:
520  tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
521  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
522  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
523  ]
524  for name in tryNames:
525  if not hasattr(self.process,name):
526  theModuleLabel=name
527  break
528  if not theModuleLabel:
529  raise Exception("cannot find a module label for specification: "+outDefDictStr)
530  if id==0:
531  defaultFileName=self._options.outfile_name
532  else:
533  defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')
534 
535  theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
536  if not theFileName.endswith('.root'):
537  theFileName+='.root'
538 
539  if len(outDefDict.keys()):
540  raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
541  if theStreamType=='DQMIO': theStreamType='DQM'
542  if theStreamType=='ALL':
543  theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
544  else:
545  theEventContent = getattr(self.process, theStreamType+"EventContent")
546 
547 
548  addAlCaSelects=False
549  if theStreamType=='ALCARECO' and not theFilterName:
550  theFilterName='StreamALCACombined'
551  addAlCaSelects=True
552 
553  CppType='PoolOutputModule'
554  if self._options.timeoutOutput:
555  CppType='TimeoutPoolOutputModule'
556  if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
557  output = cms.OutputModule(CppType,
558  theEventContent.clone(),
559  fileName = cms.untracked.string(theFileName),
560  dataset = cms.untracked.PSet(
561  dataTier = cms.untracked.string(theTier),
562  filterName = cms.untracked.string(theFilterName))
563  )
564  if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
565  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
566  if not theSelectEvent and hasattr(self.process,'filtering_step'):
567  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
568  if theSelectEvent:
569  output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
570 
571  if addAlCaSelects:
572  if not hasattr(output,'SelectEvents'):
573  output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
574  for alca in self.AlCaPaths:
575  output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)
576 
577 
578  if hasattr(self.process,theModuleLabel):
579  raise Exception("the current process already has a module "+theModuleLabel+" defined")
580  #print "creating output module ",theModuleLabel
581  setattr(self.process,theModuleLabel,output)
582  outputModule=getattr(self.process,theModuleLabel)
583  setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
584  path=getattr(self.process,theModuleLabel+'_step')
585  self.schedule.append(path)
586 
587  if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
588  def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
589  return label
590  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
591  if theExtraOutputCommands:
592  if not isinstance(theExtraOutputCommands,list):
593  raise Exception("extra output command in --output must be a list of strings")
594  if hasattr(self.process,theStreamType+"EventContent"):
595  self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
596  else:
597  outputModule.outputCommands.extend(theExtraOutputCommands)
598 
599  result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
600 
601  ##ends the --output options model
602  return result
603 
604  streamTypes=self._options.eventcontent.split(',')
605  tiers=self._options.datatier.split(',')
606  if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
607  raise Exception("number of event content arguments does not match number of datatier arguments")
608 
609  # if the only step is alca we don't need to put in an output
610  if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
611  return "\n"
612 
613  for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
614  if streamType=='': continue
615  if streamType=='DQMIO': streamType='DQM'
616  theEventContent = getattr(self.process, streamType+"EventContent")
617  if i==0:
618  theFileName=self._options.outfile_name
619  theFilterName=self._options.filtername
620  else:
621  theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
622  theFilterName=self._options.filtername
623  CppType='PoolOutputModule'
624  if self._options.timeoutOutput:
625  CppType='TimeoutPoolOutputModule'
626  if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
627  output = cms.OutputModule(CppType,
628  theEventContent,
629  fileName = cms.untracked.string(theFileName),
630  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
631  filterName = cms.untracked.string(theFilterName)
632  )
633  )
634  if hasattr(self.process,"generation_step") and streamType!='LHE':
635  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
636  if hasattr(self.process,"filtering_step"):
637  output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
638 
639  if streamType=='ALCARECO':
640  output.dataset.filterName = cms.untracked.string('StreamALCACombined')
641 
642  if "MINIAOD" in streamType:
643  output.dropMetaData = cms.untracked.string('ALL')
644  output.fastCloning= cms.untracked.bool(False)
645  output.overrideInputFileSplitLevels = cms.untracked.bool(True)
646 
647  outputModuleName=streamType+'output'
648  setattr(self.process,outputModuleName,output)
649  outputModule=getattr(self.process,outputModuleName)
650  setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
651  path=getattr(self.process,outputModuleName+'_step')
652  self.schedule.append(path)
653 
654  if self._options.outputCommands and streamType!='DQM':
655  for evct in self._options.outputCommands.split(','):
656  if not evct: continue
657  self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
658 
659  if not self._options.inlineEventContent:
660  def doNotInlineEventContent(instance,label = "process."+streamType+"EventContent.outputCommands"):
661  return label
662  outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
663 
664  result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
665 
666  return result
667 
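# --output sketch (hypothetical spec): besides the classic --eventcontent /
# --datatier pair, addOutput accepts a list of dicts, e.g.
#
#   --output '[{"e":"RECOSIM","t":"GEN-SIM-RECO"},{"e":"AODSIM","t":"AODSIM"}]'
#
# Each dict is consumed key by key with anyOf(): the tier is mandatory, the
# event content defaults to the tier, and a PoolOutputModule (or
# TimeoutPoolOutputModule / DQMRootOutputModule) plus its EndPath are created.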
668  def addStandardSequences(self):
669  """
670  Add selected standard sequences to the process
671  """
672  # load the pile up file
673  if self._options.pileup:
674  pileupSpec=self._options.pileup.split(',')[0]
675 
676  # Does the requested pile-up scenario exist?
677  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
678  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
679  message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
680  raise Exception(message)
681 
682  # Put mixing parameters in a dictionary
683  if '.' in pileupSpec:
684  mixingDict={'file':pileupSpec}
685  elif pileupSpec.startswith('file:'):
686  mixingDict={'file':pileupSpec[5:]}
687  else:
688  import copy
689  mixingDict=copy.copy(Mixing[pileupSpec])
690  if len(self._options.pileup.split(','))>1:
691  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
692 
693  # Load the pu cfg file corresponding to the requested pu scenario
694  if 'file:' in pileupSpec:
695  #the file is local
696  self.process.load(mixingDict['file'])
697  print "inlining mixing module configuration"
698  self._options.inlineObjets+=',mix'
699  else:
700  self.loadAndRemember(mixingDict['file'])
701 
702  # FastSim: transform cfg of MixingModule from FullSim to FastSim
703  if self._options.fast:
704  self._options.customisation_file.insert(0,"FastSimulation/Configuration/MixingModule_Full2Fast.prepareDigiRecoMixing")
705 
706  mixingDict.pop('file')
707  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
708  if self._options.pileup_input:
709  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
710  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
711  else:
712  mixingDict['F']=self._options.pileup_input.split(',')
713  specialization=defineMixing(mixingDict)
714  for command in specialization:
715  self.executeAndRemember(command)
716  if len(mixingDict)!=0:
717  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
718 
719 
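# --pileup sketch (values hypothetical): the token before the first comma
# selects a scenario, anything after the comma is eval'ed as a dict that
# overrides the scenario parameters:
#   --pileup NoPileUp
#   --pileup file:myMixing_cfi.py
# Scenario names come from Configuration.StandardSequences.Mixing; the pileup
# dataset itself is given separately via --pileup_input (das:, dbs:, or a
# comma-separated file list), which ends up in mixingDict['F'].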
720  # load the geometry file
721  try:
722  if len(self.stepMap):
723  self.loadAndRemember(self.GeometryCFF)
724  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
725  self.loadAndRemember(self.SimGeometryCFF)
726  if self.geometryDBLabel:
727  self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
728  except ImportError:
729  print "Geometry option",self._options.geometry,"unknown."
730  raise
731 
732  if len(self.stepMap):
733  self.loadAndRemember(self.magFieldCFF)
734 
735  for stepName in self.stepKeys:
736  stepSpec = self.stepMap[stepName]
737  print "Step:", stepName,"Spec:",stepSpec
738  if stepName.startswith('re'):
739  ##add the corresponding input content
740  if stepName[2:] not in self._options.donotDropOnInput:
741  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
742  stepName=stepName[2:]
743  if stepSpec=="":
744  getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
745  elif type(stepSpec)==list:
746  getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
747  elif type(stepSpec)==tuple:
748  getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
749  else:
750  raise ValueError("Invalid step definition")
751 
752  if self._options.restoreRNDSeeds!=False:
753  #it is either True, or a process name
754  if self._options.restoreRNDSeeds==True:
755  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
756  else:
757  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
758  if self._options.inputEventContent or self._options.inputCommands:
759  if self._options.inputCommands:
760  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
761  else:
762  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
763 
764 
766  if self._options.inputEventContent:
767  import copy
768  def dropSecondDropStar(iec):
769  #drop all but the first occurrence of 'drop *' in the list
770  count=0
771  for item in iec:
772  if item=='drop *':
773  if count!=0:
774  iec.remove(item)
775  count+=1
776 
777 
778  ## allow comma separated input eventcontent
779  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
780  for evct in self._options.inputEventContent.split(','):
781  if evct=='': continue
782  theEventContent = getattr(self.process, evct+"EventContent")
783  if hasattr(theEventContent,'outputCommands'):
784  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
785  if hasattr(theEventContent,'inputCommands'):
786  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
787 
788  dropSecondDropStar(self.process.source.inputCommands)
789 
790  if not self._options.dropDescendant:
791  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
792 
793 
794  return
795 
796  def addConditions(self):
797  """Add conditions to the process"""
798  if not self._options.conditions: return
799 
800  if 'FrontierConditions_GlobalTag' in self._options.conditions:
801  print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
802  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
803 
804  self.loadAndRemember(self.ConditionsDefaultCFF)
805 
806  if self._options.useCondDBv1:
807  from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag
808  else:
809  from Configuration.AlCa.GlobalTag import GlobalTag
810 
811  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
812 
813  if self._options.useCondDBv1:
814  self.additionalCommands.append('from Configuration.AlCa.GlobalTag_condDBv1 import GlobalTag')
815  else:
816  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
817 
818  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
819 
820  if self._options.slhc:
821  self.loadAndRemember("SLHCUpgradeSimulations/Geometry/fakeConditions_%s_cff"%(self._options.slhc,))
822 
823 
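# addConditions sketch (hypothetical tag): a command line such as
#   --conditions auto:run2_mc
# is recorded so that the dumped configuration contains
#   from Configuration.AlCa.GlobalTag import GlobalTag
#   process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_mc', '')
# (the GlobalTag_condDBv1 variant is used instead when useCondDBv1 is set).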
824  def addCustomise(self,unsch=0):
825  """Include the customise code """
826 
827  custOpt=[]
828  if unsch==0:
829  for c in self._options.customisation_file:
830  custOpt.extend(c.split(","))
831  else:
832  for c in self._options.customisation_file_unsch:
833  custOpt.extend(c.split(","))
834 
835  custMap=DictTypes.SortedKeysDict()
836  for opt in custOpt:
837  if opt=='': continue
838  if opt.count('.')>1:
839  raise Exception("more than . in the specification:"+opt)
840  fileName=opt.split('.')[0]
841  if opt.count('.')==0: rest='customise'
842  else:
843  rest=opt.split('.')[1]
844  if rest=='py': rest='customise' #catch the case of --customise file.py
845 
846  if fileName in custMap:
847  custMap[fileName].extend(rest.split('+'))
848  else:
849  custMap[fileName]=rest.split('+')
850 
851  if len(custMap)==0:
852  final_snippet='\n'
853  else:
854  final_snippet='\n# customisation of the process.\n'
855 
856  allFcn=[]
857  for opt in custMap:
858  allFcn.extend(custMap[opt])
859  for fcn in allFcn:
860  if allFcn.count(fcn)!=1:
861  raise Exception("cannot specify twice "+fcn+" as a customisation method")
862 
863  for f in custMap:
864  # let python search for that package and do syntax checking at the same time
865  packageName = f.replace(".py","").replace("/",".")
866  __import__(packageName)
867  package = sys.modules[packageName]
868 
869  # now ask the package for its definition and pick .py instead of .pyc
870  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
871 
872  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
873  if self._options.inline_custom:
874  for line in file(customiseFile,'r'):
875  if "import FWCore.ParameterSet.Config" in line:
876  continue
877  final_snippet += line
878  else:
879  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
880  for fcn in custMap[f]:
881  print "customising the process with",fcn,"from",f
882  if not hasattr(package,fcn):
883  #bound to fail at run time
884  raise Exception("config "+f+" has no function "+fcn)
885  #execute the command
886  self.process=getattr(package,fcn)(self.process)
887  #and print it in the configuration
888  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
889  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
890 
891  if len(custMap)!=0:
892  final_snippet += '\n# End of customisation functions\n'
893 
894  ### now for a useful command
895  if unsch==1 or not self._options.runUnscheduled:
896  if self._options.customise_commands:
897  import string
898  final_snippet +='\n# Customisation from command line'
899  for com in self._options.customise_commands.split('\\n'):
900  com=string.lstrip(com)
901  self.executeAndRemember(com)
902  final_snippet +='\n'+com
903 
904  return final_snippet
905 
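# --customise sketch (hypothetical package/function): each entry is of the
# form Package/Module.function, several functions may be chained with '+',
# and a bare module name defaults to a function named "customise":
#
#   --customise SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1
#
# adds to the dumped configuration
#   from SLHCUpgradeSimulations.Configuration.postLS1Customs import customisePostLS1
#   process = customisePostLS1(process)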
906  #----------------------------------------------------------------------------
907  # here the methods to define the python includes for each step or
908  # conditions
909  #----------------------------------------------------------------------------
910  def define_Configs(self):
911  if len(self.stepMap):
912  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
913  if self._options.particleTable not in defaultOptions.particleTableList:
914  print 'Invalid particle table provided. Options are:'
915  print defaultOptions.particleTableList
916  sys.exit(-1)
917  else:
918  if len(self.stepMap):
919  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
920 
921  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
922 
923  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
924  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
925  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
926  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
927  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
928  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
929  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
930  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
931  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
932  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
933  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
934  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
935  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
936  self.EIDefaultCFF=None
937  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
938  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
939  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
940  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
941  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
942  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
943  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
944  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
945  if self._options.useCondDBv1:
946  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_condDBv1_cff"
947  else:
948  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
949  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
950  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
951 
952  if "DATAMIX" in self.stepMap.keys():
953  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
954  if self._options.datamix == 'PreMix':
955  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
956  else:
957  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
958  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
959  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
960 
961  if "DIGIPREMIX" in self.stepMap.keys():
962  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
963 
964  self.ALCADefaultSeq=None
965  self.LHEDefaultSeq='externalLHEProducer'
966  self.GENDefaultSeq='pgen'
967  self.SIMDefaultSeq='psim'
968  self.DIGIDefaultSeq='pdigi'
969  self.DIGIPREMIXDefaultSeq='pdigi'
970  self.DIGIPREMIX_S2DefaultSeq='pdigi'
971  self.DATAMIXDefaultSeq=None
972  self.DIGI2RAWDefaultSeq='DigiToRaw'
973  self.HLTDefaultSeq='GRun'
974  self.L1DefaultSeq=None
975  self.L1REPACKDefaultSeq='GT'
976  self.HARVESTINGDefaultSeq=None
977  self.ALCAHARVESTDefaultSeq=None
978  self.CFWRITERDefaultSeq=None
979  self.RAW2DIGIDefaultSeq='RawToDigi'
980  self.L1RecoDefaultSeq='L1Reco'
981  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
982  if 'RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap:
983  self.RECODefaultSeq='reconstruction'
984  else:
985  self.RECODefaultSeq='reconstruction_fromRECO'
986 
987  self.EIDefaultSeq='top'
988  self.POSTRECODefaultSeq=None
989  self.L1HwValDefaultSeq='L1HwVal'
990  self.DQMDefaultSeq='DQMOffline'
991  self.VALIDATIONDefaultSeq=''
992  self.ENDJOBDefaultSeq='endOfProcess'
993  self.REPACKDefaultSeq='DigiToRawRepack'
994  self.PATDefaultSeq='miniAOD'
995 
996  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
997 
998  if not self._options.beamspot:
999  self._options.beamspot=VtxSmearedDefaultKey
1000 
1001  # if it's MC then change the raw2digi
1002  if self._options.isMC==True:
1003  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1004  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1005  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1006  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1007  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1008  else:
1009  self._options.beamspot = None
1010 
1011  #patch for gen, due to backward incompatibility
1012  if 'reGEN' in self.stepMap:
1013  self.GENDefaultSeq='fixGenInfo'
1014 
1015  if self._options.scenario=='cosmics':
1016  self._options.pileup='Cosmics'
1017  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1018  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1019  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1020  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1021  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1022  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1023  if self._options.isMC==True:
1024  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1025  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1026  self.RECODefaultSeq='reconstructionCosmics'
1027  self.DQMDefaultSeq='DQMOfflineCosmics'
1028 
1029  if self._options.scenario=='HeavyIons':
1030  if not self._options.beamspot:
1031  self._options.beamspot=VtxSmearedHIDefaultKey
1032  self.HLTDefaultSeq = 'HIon'
1033  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1034  self.VALIDATIONDefaultSeq=''
1035  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1036  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1037  self.RECODefaultSeq='reconstructionHeavyIons'
1038  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1039  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1040  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1041  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1042  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1043  if self._options.isMC==True:
1044  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1045 
1046 
1047  self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1048 
1049  self.USERDefaultSeq='user'
1050  self.USERDefaultCFF=None
1051 
1052  # the magnetic field
1053  if self._options.isData:
1054  if self._options.magField==defaultOptions.magField:
1055  print "magnetic field option forced to: AutoFromDBCurrent"
1056  self._options.magField='AutoFromDBCurrent'
1057  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1058  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1059 
1060  # the geometry
1061  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1062  self.geometryDBLabel=None
1063  simGeometry=''
1064  if self._options.fast:
1065  if 'start' in self._options.conditions.lower():
1066  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1067  else:
1068  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1069  else:
1070  def inGeometryKeys(opt):
1071  from Configuration.StandardSequences.GeometryConf import GeometryConf
1072  if opt in GeometryConf:
1073  return GeometryConf[opt]
1074  else:
1075  return opt
1076 
1077  geoms=self._options.geometry.split(',')
1078  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1079  if len(geoms)==2:
1080  #may specify the reco geometry
1081  if '/' in geoms[1] or '_cff' in geoms[1]:
1082  self.GeometryCFF=geoms[1]
1083  else:
1084  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1085 
1086  if (geoms[0].startswith('DB:')):
1087  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1088  self.geometryDBLabel=geoms[0][3:]
1089  print "with DB:"
1090  else:
1091  if '/' in geoms[0] or '_cff' in geoms[0]:
1092  self.SimGeometryCFF=geoms[0]
1093  else:
1094  simGeometry=geoms[0]
1095  if self._options.gflash==True:
1096  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1097  else:
1098  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1099 
1100  # synchronize the geometry configuration and the FullSimulation sequence to be used
1101  if simGeometry not in defaultOptions.geometryExtendedOptions:
1102  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1103 
1104  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1105  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1106  self._options.beamspot='NoSmear'
1107 
1108  # if fastsim switch event content
1109  if self._options.fast:
1110  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1111  self.SIMDefaultSeq = 'psim'
1112  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1113  self.RECODefaultSeq= 'reconstruction'
1114  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1115  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1116  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1117  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1118  self.DIGIDefaultCFF = 'FastSimulation.Configuration.Digi_cff'
1119  if self._options.datamix == 'PreMix':
1120  self.DIGIDefaultCFF="FastSimulation.Configuration.DigiDMPreMix_cff"
1121  if "DIGIPREMIX" in self.stepMap.keys():
1122  self.DIGIDefaultCFF="FastSimulation.Configuration.Digi_PreMix_cff"
1123  if "DATAMIX" in self.stepMap.keys():
1124  self.DATAMIXDefaultCFF="FastSimulation.Configuration.DataMixer"+self._options.datamix+"_cff"
1125 
1126  self.DIGIDefaultSeq = 'pdigi'
1127  self.L1EMDefaultCFF='FastSimulation.Configuration.SimL1Emulator_cff'
1128  self.L1RecoDefaultCFF='FastSimulation.Configuration.L1Reco_cff'
1129  self.DIGI2RAWDefaultCFF = 'FastSimulation.Configuration.DigiToRaw_cff'
1130  self.DIGI2RAWDefaultSeq = 'DigiToRaw'
1131  self.EVTCONTDefaultCFF = "FastSimulation.Configuration.EventContent_cff"
1132  self.VALIDATIONDefaultCFF = "FastSimulation.Configuration.Validation_cff"
1133 
1134 
1135 
1136  # Mixing
1137  if self._options.pileup=='default':
1138  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1139  self._options.pileup=MixingDefaultKey
1140 
1141 
1142  #not driven by a default cff anymore
1143  if self._options.isData:
1144  self._options.pileup=None
1145 
1146  if self._options.slhc:
1147  self.GeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1148  if 'stdgeom' not in self._options.slhc:
1149  self.SimGeometryCFF='SLHCUpgradeSimulations.Geometry.%s_cmsSimIdealGeometryXML_cff'%(self._options.slhc,)
1150  self.DIGIDefaultCFF='SLHCUpgradeSimulations/Geometry/Digi_%s_cff'%(self._options.slhc,)
1151  if self._options.pileup!=defaultOptions.pileup:
1152  self._options.pileup='SLHC_%s_%s'%(self._options.pileup,self._options.slhc)
1153 
1154  self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1155 
1156  # for alca, skims, etc
1157  def addExtraStream(self,name,stream,workflow='full'):
1158  # define output module and go from there
1159  output = cms.OutputModule("PoolOutputModule")
1160  if stream.selectEvents.parameters_().__len__()!=0:
1161  output.SelectEvents = stream.selectEvents
1162  else:
1163  output.SelectEvents = cms.untracked.PSet()
1164  output.SelectEvents.SelectEvents=cms.vstring()
1165  if isinstance(stream.paths,tuple):
1166  for path in stream.paths:
1167  output.SelectEvents.SelectEvents.append(path.label())
1168  else:
1169  output.SelectEvents.SelectEvents.append(stream.paths.label())
1170 
1171 
1172 
1173  if isinstance(stream.content,str):
1174  evtPset=getattr(self.process,stream.content)
1175  for p in evtPset.parameters_():
1176  setattr(output,p,getattr(evtPset,p))
1177  if not self._options.inlineEventContent:
1178  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1179  return label
1180  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1181  else:
1182  output.outputCommands = stream.content
1183 
1184 
1185  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1186 
1187  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1188  filterName = cms.untracked.string(stream.name))
1189 
1190  if self._options.filtername:
1191  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1192 
1193  #add an automatic flushing to limit memory consumption
1194  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1195 
1196  if workflow in ("producers","full"):
1197  if isinstance(stream.paths,tuple):
1198  for path in stream.paths:
1199  self.schedule.append(path)
1200  else:
1201  self.schedule.append(stream.paths)
1202 
1203 
1204  # in case of relvals we don't want to have additional outputs
1205  if (not self._options.relval) and workflow in ("full","output"):
1206  self.additionalOutputs[name] = output
1207  setattr(self.process,name,output)
1208 
1209  if workflow == 'output':
1210  # adjust the select events to the proper trigger results from previous process
1211  filterList = output.SelectEvents.SelectEvents
1212  for i, filter in enumerate(filterList):
1213  filterList[i] = filter+":"+self._options.triggerResultsProcess
1214 
1215  return output
1216 
1217  #----------------------------------------------------------------------------
1218  # here the methods to create the steps. Of course we are doing magic here ;)
1219  # prepare_STEPNAME modifies self.process and what else's needed.
1220  #----------------------------------------------------------------------------
1221 
1222  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF,unsch=0):
1223  if ( len(sequence.split('.'))==1 ):
1224  l=self.loadAndRemember(defaultCFF,unsch)
1225  elif ( len(sequence.split('.'))==2 ):
1226  l=self.loadAndRemember(sequence.split('.')[0],unsch)
1227  sequence=sequence.split('.')[1]
1228  else:
1229  print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
1230  print sequence,"not recognized"
1231  raise
1232  return l
1233 
1234  def scheduleSequence(self,seq,prefix,what='Path'):
1235  if '*' in seq:
1236  #create only one path with all sequences in it
1237  for i,s in enumerate(seq.split('*')):
1238  if i==0:
1239  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1240  else:
1241  p=getattr(self.process,prefix)
1242  p+=getattr(self.process, s)
1243  self.schedule.append(getattr(self.process,prefix))
1244  return
1245  else:
1246  #create as many paths as there are sequences
1247  if not '+' in seq:
1248  if self.nextScheduleIsConditional:
1249  self.conditionalPaths.append(prefix)
1250  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1251  self.schedule.append(getattr(self.process,prefix))
1252  else:
1253  for i,s in enumerate(seq.split('+')):
1254  sn=prefix+'%d'%(i)
1255  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1256  self.schedule.append(getattr(self.process,sn))
1257  return
1258 
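# scheduleSequence sketch (hypothetical sequence names): '+' creates one path
# per sequence, '*' folds everything into a single path:
#
#   self.scheduleSequence('siPixelRecHits+siStripMatchedRecHits', 'localreco_step')
#   # -> process.localreco_step0 and process.localreco_step1 (two cms.Path)
#   self.scheduleSequence('a*b', 'combined_step')
#   # -> process.combined_step = cms.Path(process.a + process.b)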
1259  def scheduleSequenceAtEnd(self,seq,prefix):
1260  self.scheduleSequence(seq,prefix,what='EndPath')
1261  return
1262 
1263  def prepare_ALCAPRODUCER(self, sequence = None):
1264  self.prepare_ALCA(sequence, workflow = "producers")
1265 
1266  def prepare_ALCAOUTPUT(self, sequence = None):
1267  self.prepare_ALCA(sequence, workflow = "output")
1268 
1269  def prepare_ALCA(self, sequence = None, workflow = 'full'):
1270  """ Enrich the process with alca streams """
1271  print 'DL enriching',workflow,sequence
1272  alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
1273  sequence = sequence.split('.')[-1]
1274 
1275  # decide which ALCA paths to use
1276  alcaList = sequence.split("+")
1277  maxLevel=0
1278  from Configuration.AlCa.autoAlca import autoAlca
1279  # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
1280  self.expandMapping(alcaList,autoAlca)
1281  self.AlCaPaths=[]
1282  for name in alcaConfig.__dict__:
1283  alcastream = getattr(alcaConfig,name)
1284  shortName = name.replace('ALCARECOStream','')
1285  if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
1286  output = self.addExtraStream(name,alcastream, workflow = workflow)
1287  self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
1288  self.AlCaPaths.append(shortName)
1289  if 'DQM' in alcaList:
1290  if not self._options.inlineEventContent and hasattr(self.process,name):
1291  self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1292  else:
1293  output.outputCommands.append("keep *_MEtoEDMConverter_*_*")
1294 
1295  #rename the HLT process name in the alca modules
1296  if self._options.hltProcess or 'HLT' in self.stepMap:
1297  if isinstance(alcastream.paths,tuple):
1298  for path in alcastream.paths:
1299  self.renameHLTprocessInSequence(path.label())
1300  else:
1301  self.renameHLTprocessInSequence(alcastream.paths.label())
1302 
1303  for i in range(alcaList.count(shortName)):
1304  alcaList.remove(shortName)
1305 
1306  # DQM needs a special handling
1307  elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
1308  path = getattr(alcaConfig,name)
1309  self.schedule.append(path)
1310  alcaList.remove('DQM')
1311 
1312  if isinstance(alcastream,cms.Path):
1313  #blacklist the alca paths so that they do not appear in the cfg
1314  self.blacklist_paths.append(alcastream)
1315 
1316 
1317  if len(alcaList) != 0:
1318  available=[]
1319  for name in alcaConfig.__dict__:
1320  alcastream = getattr(alcaConfig,name)
1321  if isinstance(alcastream,cms.FilteredStream):
1322  available.append(name.replace('ALCARECOStream',''))
1323  print "The following alcas could not be found "+str(alcaList)
1324  print "available ",available
1325  #print "verify your configuration, ignoring for now"
1326  raise Exception("The following alcas could not be found "+str(alcaList))
1327 
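# Minimal standalone sketch of the stream selection done in prepare_ALCA above:
# attributes named 'ALCARECOStream<X>' in the ALCA cff are matched against the
# '+'-separated list from the command line (the stream names here are illustrative).
requested = 'TkAlMinBias+DQM'.split('+')
cff_attributes = ['ALCARECOStreamTkAlMinBias', 'ALCARECOStreamMuAlOverlaps']
selected = [a for a in cff_attributes
            if a.replace('ALCARECOStream', '') in requested]
# selected -> ['ALCARECOStreamTkAlMinBias']; the special 'DQM' entry is handled
# by the pathALCARECODQM branch rather than by an output stream.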
1328  def prepare_LHE(self, sequence = None):
1329  #load the fragment
1330  ##make it loadable
1331  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1332  print "Loading lhe fragment from",loadFragment
1333  __import__(loadFragment)
1334  self.process.load(loadFragment)
1335  ##inline the modules
1336  self._options.inlineObjets+=','+sequence
1337 
1338  getattr(self.process,sequence).nEvents = int(self._options.number)
1339 
1340  #schedule it
1341  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1342  self.excludedPaths.append("lhe_step")
1343  self.schedule.append( self.process.lhe_step )
1344 
1345  def prepare_GEN(self, sequence = None):
1346  """ load the fragment of generator configuration """
1347  loadFailure=False
1348  #remove trailing .py
1349  #support old style .cfi by changing something.cfi into something_cfi
1350  #remove python/ from the name
1351  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1352  #standard location of fragments
1353  if not '/' in loadFragment:
1354  loadFragment='Configuration.Generator.'+loadFragment
1355  else:
1356  loadFragment=loadFragment.replace('/','.')
1357  try:
1358  print "Loading generator fragment from",loadFragment
1359  __import__(loadFragment)
1360  except:
1361  loadFailure=True
1362  #if self.process.source and self.process.source.type_()=='EmptySource':
1363  if not (self._options.filein or self._options.dasquery):
1364  raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")
1365 
1366  if not loadFailure:
1367  generatorModule=sys.modules[loadFragment]
1368  genModules=generatorModule.__dict__
1369  #remove lhe producer module since this should have been
1370  #imported instead in the LHE step
1371  if self.LHEDefaultSeq in genModules:
1372  del genModules[self.LHEDefaultSeq]
1373 
1374  if self._options.hideGen:
1375  self.loadAndRemember(loadFragment)
1376  else:
1377  self.process.load(loadFragment)
1378  # expose the objects from that fragment to the configuration
1379  import FWCore.ParameterSet.Modules as cmstypes
1380  for name in genModules:
1381  theObject = getattr(generatorModule,name)
1382  if isinstance(theObject, cmstypes._Module):
1383  self._options.inlineObjets=name+','+self._options.inlineObjets
1384  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1385  self._options.inlineObjets+=','+name
1386 
1387  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1388  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1389  self.productionFilterSequence = 'ProductionFilterSequence'
1390  elif 'generator' in genModules:
1391  self.productionFilterSequence = 'generator'
1392 
1393  """ Enrich the schedule with the rest of the generation step """
1394  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1395  genSeqName=sequence.split('.')[-1]
1396 
1397  if True:
1398  try:
1399  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1400  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1401  self.loadAndRemember(cffToBeLoaded)
1402  except ImportError:
1403  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1404 
1405  if self._options.scenario == 'HeavyIons':
1406  if self._options.pileup=='HiMixGEN':
1407  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1408  else:
1409  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1410 
1411  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1412  self.schedule.append(self.process.generation_step)
1413 
1414  #register to the genstepfilter the name of the path (static right now, but might evolve)
1415  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1416 
1417  if 'reGEN' in self.stepMap:
1418  #stop here
1419  return
1420 
1421  """ Enrich the schedule with the summary of the filter step """
1422  #the gen filter in the endpath
1423  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1424  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1425  return
1426 
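# Minimal standalone sketch of the fragment-name normalisation performed in
# prepare_GEN above (the example fragment names are illustrative):
def normalise_fragment(evt_type):
    frag = evt_type.replace('.py', '').replace('.', '_').replace('python/', '')
    if '/' not in frag:
        # bare names are looked up under Configuration.Generator
        return 'Configuration.Generator.' + frag
    # package-qualified names become dotted module paths
    return frag.replace('/', '.')

# normalise_fragment('SingleMuPt10_cfi.py')
#   -> 'Configuration.Generator.SingleMuPt10_cfi'
# normalise_fragment('MyPkg/MySubPkg/python/MyFragment_cff.py')
#   -> 'MyPkg.MySubPkg.MyFragment_cff'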
1427  def prepare_SIM(self, sequence = None):
1428  """ Enrich the schedule with the simulation step"""
1429  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1430  if not self._options.fast:
1431  if self._options.gflash==True:
1432  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1433 
1434  if self._options.magField=='0T':
1435  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1436  else:
1437  if self._options.magField=='0T':
1438  self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")
1439 
1440  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1441  return
1442 
1443  def prepare_DIGI(self, sequence = None):
1444  """ Enrich the schedule with the digitisation step"""
1445  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1446 
1447  if self._options.gflash==True:
1448  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1449 
1450  if sequence == 'pdigi_valid':
1451  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1452 
1453  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
1454  if self._options.inputEventContent=='':
1455  self._options.inputEventContent='REGEN'
1456  else:
1457  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1458 
1459 
1460  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1461  return
1462 
1463  def prepare_DIGIPREMIX(self, sequence = None):
1464  """ Enrich the schedule with the digitisation step"""
1465  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1466 
1467  self.loadAndRemember("SimGeneral/MixingModule/digi_noNoise_cfi")
1468 
1469  if sequence == 'pdigi_valid':
1470  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1471  else:
1472  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1473 
1474  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1475  return
1476 
1477  def prepare_DIGIPREMIX_S2(self, sequence = None):
1478  """ Enrich the schedule with the digitisation step"""
1479  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1480 
1481  self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")
1482 
1483 
1484  if sequence == 'pdigi_valid':
1485  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1486  else:
1487  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1488 
1489  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1490  return
1491 
1492  def prepare_CFWRITER(self, sequence = None):
1493  """ Enrich the schedule with the crossing frame writer step"""
1494  self.loadAndRemember(self.CFWRITERDefaultCFF)
1495  self.scheduleSequence('pcfw','cfwriter_step')
1496  return
1497 
1498  def prepare_DATAMIX(self, sequence = None):
1499  """ Enrich the schedule with the digitisation step"""
1500  self.loadAndRemember(self.DATAMIXDefaultCFF)
1501  self.scheduleSequence('pdatamix','datamixing_step')
1502 
1503  if self._options.pileup_input:
1504  theFiles=''
1505  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1506  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1507  elif self._options.pileup_input.startswith("filelist:"):
1508  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1509  else:
1510  theFiles=self._options.pileup_input.split(',')
1511  #print theFiles
1512  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1513 
1514  return
1515 
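# Minimal standalone sketch of how the --pileup_input string is dispatched in
# prepare_DATAMIX above; filesFromDASQuery/filesFromList are the real helpers,
# and the example values below are illustrative.
def classify_pileup_input(pileup_input):
    if pileup_input.startswith('dbs:') or pileup_input.startswith('das:'):
        return 'das query', pileup_input[4:]          # dataset name for a DAS query
    if pileup_input.startswith('filelist:'):
        return 'file list', pileup_input[9:]          # text file listing the input files
    return 'explicit files', pileup_input.split(',')  # comma-separated file names

# classify_pileup_input('das:/RelValMinBias/FakeCampaign/GEN-SIM')
#   -> ('das query', '/RelValMinBias/FakeCampaign/GEN-SIM')
# classify_pileup_input('pu_a.root,pu_b.root')
#   -> ('explicit files', ['pu_a.root', 'pu_b.root'])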
1516  def prepare_DIGI2RAW(self, sequence = None):
1517  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGI2RAWDefaultCFF)
1518  self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
1519  if "DIGIPREMIX" in self.stepMap.keys():
1520  self.executeAndRemember("process.esDigiToRaw.Label = cms.string('mix')") ##terrible hack - bypass zero suppression
1521  self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')") ##special readout mode for StripTracker
1522 
1523  return
1524 
1525  def prepare_REPACK(self, sequence = None):
1526  self.loadDefaultOrSpecifiedCFF(sequence,self.REPACKDefaultCFF)
1527  self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
1528  return
1529 
1530  def prepare_L1(self, sequence = None):
1531  """ Enrich the schedule with the L1 simulation step"""
1532  assert(sequence == None)
1533  self.loadAndRemember(self.L1EMDefaultCFF)
1534  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1535  return
1536 
1537  def prepare_L1REPACK(self, sequence = None):
1538  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1539  supported = ['GT','GT1','GT2','GCTGT']
1540  if sequence in supported:
1541  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1542  if self._options.scenario == 'HeavyIons':
1543  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1544  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1545  else:
1546  print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
1547  raise Exception('unsupported feature')
1548 
1549 
1550  def prepare_HLT(self, sequence = None):
1551  """ Enrich the schedule with the HLT simulation step"""
1552  if not sequence:
1553  print "no specification of the hlt menu has been given, should never happen"
1554  raise Exception('no HLT sequence provided')
1555 
1556  if '@' in sequence:
1557  # case where HLT:@something was provided
1558  from Configuration.HLT.autoHLT import autoHLT
1559  key = sequence[1:]
1560  if key in autoHLT:
1561  sequence = autoHLT[key]
1562  else:
1563  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1564 
1565  if ',' in sequence:
1566  #case where HLT:something:something was provided
1567  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1568  optionsForHLT = {}
1569  if self._options.scenario == 'HeavyIons':
1570  optionsForHLT['type'] = 'HIon'
1571  else:
1572  optionsForHLT['type'] = 'GRun'
1573  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
1574  if sequence == 'run,fromSource':
1575  if hasattr(self.process.source,'firstRun'):
1576  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1577  elif hasattr(self.process.source,'setRunNumber'):
1578  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1579  else:
1580  raise Exception('Cannot replace menu to load %s'%(sequence))
1581  else:
1582  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
1583  else:
1584  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)
1585 
1586  if self._options.isMC:
1587  if self._options.fast:
1588  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFastSim")
1589  else:
1590  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforFullSim")
1591 
1592  if self._options.name != 'HLT':
1593  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1594  self.additionalCommands.append('process = ProcessName(process)')
1595  self.additionalCommands.append('')
1596  from HLTrigger.Configuration.CustomConfigs import ProcessName
1597  self.process = ProcessName(self.process)
1598 
1599  self.schedule.append(self.process.HLTSchedule)
1600  [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]
1601 
1602  #this is a fake, to be removed with the fastsim migration and HLT menu dump
1603  if self._options.fast:
1604  if not hasattr(self.process,'HLTEndSequence'):
1605  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1606 
1607 
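# Minimal standalone sketch of the '@' shorthand handled in prepare_HLT above,
# using a made-up autoHLT-style mapping (the real keys live in
# Configuration.HLT.autoHLT):
autoHLT_sketch = {'frozen2014': 'GRun', 'fake': 'Fake'}

def resolve_hlt(sequence):
    if sequence.startswith('@'):
        key = sequence[1:]
        if key not in autoHLT_sketch:
            raise ValueError('no HLT mapping key "%s" found' % key)
        sequence = autoHLT_sketch[key]
    return sequence

# resolve_hlt('@frozen2014') -> 'GRun', which selects HLTrigger/Configuration/HLT_GRun_cff
# resolve_hlt('GRun')        -> 'GRun' (no shorthand, used as-is)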
1608  def prepare_RAW2RECO(self, sequence = None):
1609  if ',' in sequence:
1610  seqReco=sequence.split(',')[1]
1611  seqDigi=sequence.split(',')[0]
1612  else:
1613  print "RAW2RECO requires two specifications",sequence,"insufficient"
1614 
1615  self.prepare_RAW2DIGI(seqDigi)
1616  self.prepare_RECO(seqReco)
1617  return
1618 
1619  def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
1620  self.loadDefaultOrSpecifiedCFF(sequence,self.RAW2DIGIDefaultCFF)
1621  self.scheduleSequence(sequence,'raw2digi_step')
1622  # if self._options.isRepacked:
1623  #self.renameInputTagsInSequence(sequence)
1624  return
1625 
1626  def prepare_PATFILTER(self, sequence=None):
1627  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1628  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1629  for filt in allMetFilterPaths:
1630  self.schedule.append(getattr(self.process,'Flag_'+filt))
1631 
1632  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1633  ''' Enrich the schedule with L1 HW validation '''
1634  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1635  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1636  print '\n\n\n DEPRECATED this has no action \n\n\n'
1637  return
1638 
1639  def prepare_L1Reco(self, sequence = "L1Reco"):
1640  ''' Enrich the schedule with L1 reconstruction '''
1641  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1642  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1643  return
1644 
1645  def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
1646  ''' Enrich the schedule with the L1 track trigger '''
1647  self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
1648  self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
1649  return
1650 
1651  def prepare_FILTER(self, sequence = None):
1652  ''' Enrich the schedule with a user defined filter sequence '''
1653  ## load the relevant part
1654  filterConfig=self.load(sequence.split('.')[0])
1655  filterSeq=sequence.split('.')[-1]
1656  ## print it in the configuration
1657  class PrintAllModules(object):
1658  def __init__(self):
1659  self.inliner=''
1660  pass
1661  def enter(self,visitee):
1662  try:
1663  label=visitee.label()
1664  ##needs to be in reverse order
1665  self.inliner=label+','+self.inliner
1666  except:
1667  pass
1668  def leave(self,v): pass
1669 
1670  expander=PrintAllModules()
1671  getattr(self.process,filterSeq).visit( expander )
1672  self._options.inlineObjets+=','+expander.inliner
1673  self._options.inlineObjets+=','+filterSeq
1674 
1675  ## put the filtering path in the schedule
1676  self.scheduleSequence(filterSeq,'filtering_step')
1677  self.nextScheduleIsConditional=True
1678  ## put it before all the other paths
1679  self.productionFilterSequence = filterSeq
1680 
1681  return
1682 
1683  def prepare_RECO(self, sequence = "reconstruction"):
1684  ''' Enrich the schedule with reconstruction '''
1685  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1686  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1687  return
1688 
1689  def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
1690  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1691  if not self._options.fast:
1692  print "ERROR: this step is only implemented for FastSim"
1693  sys.exit()
1694  self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1695  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
1696  return
1697 
1698  def prepare_PAT(self, sequence = "miniAOD"):
1699  ''' Enrich the schedule with PAT '''
1700  self.prepare_PATFILTER(self)
1701  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1) #this is unscheduled
1702  if not self._options.runUnscheduled:
1703  raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1704  if self._options.isData:
1705  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1706  else:
1707  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1708  if self._options.fast:
1709  self._options.customisation_file_unsch.append("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")
1710  return
1711 
1712  def prepare_EI(self, sequence = None):
1713  ''' Enrich the schedule with event interpretation '''
1714  from Configuration.StandardSequences.EventInterpretation import EventInterpretation
1715  if sequence in EventInterpretation:
1716  self.EIDefaultCFF = EventInterpretation[sequence]
1717  sequence = 'EIsequence'
1718  else:
1719  raise Exception('Cannot set %s event interpretation'%( sequence) )
1720  self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1721  self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
1722  return
1723 
1724  def prepare_SKIM(self, sequence = "all"):
1725  ''' Enrich the schedule with skimming fragments'''
1726  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1727  sequence = sequence.split('.')[-1]
1728 
1729  skimlist=sequence.split('+')
1730  ## support @Mu+DiJet+@Electron configuration via autoSkim.py
1731  from Configuration.Skimming.autoSkim import autoSkim
1732  self.expandMapping(skimlist,autoSkim)
1733 
1734  #print "dictionary for skims:",skimConfig.__dict__
1735  for skim in skimConfig.__dict__:
1736  skimstream = getattr(skimConfig,skim)
1737  if isinstance(skimstream,cms.Path):
1738  #blacklist the skim paths so that they do not appear in the cfg
1739  self.blacklist_paths.append(skimstream)
1740  if (not isinstance(skimstream,cms.FilteredStream)):
1741  continue
1742  shortname = skim.replace('SKIMStream','')
1743  if (sequence=="all"):
1744  self.addExtraStream(skim,skimstream)
1745  elif (shortname in skimlist):
1746  self.addExtraStream(skim,skimstream)
1747  #add a DQM event content for this stream
1748  if self._options.datatier=='DQM':
1749  self.process.load(self.EVTCONTDefaultCFF)
1750  skimstreamDQM = cms.FilteredStream(
1751  responsible = skimstream.responsible,
1752  name = skimstream.name+'DQM',
1753  paths = skimstream.paths,
1754  selectEvents = skimstream.selectEvents,
1755  content = self._options.datatier+'EventContent',
1756  dataTier = cms.untracked.string(self._options.datatier)
1757  )
1758  self.addExtraStream(skim+'DQM',skimstreamDQM)
1759  for i in range(skimlist.count(shortname)):
1760  skimlist.remove(shortname)
1761 
1762 
1763 
1764  if (len(skimlist)!=0 and sequence!="all"):
1765  print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
1766  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1767 
1768  def prepare_USER(self, sequence = None):
1769  ''' Enrich the schedule with a user defined sequence '''
1770  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1771  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1772  return
1773 
1774  def prepare_POSTRECO(self, sequence = None):
1775  """ Enrich the schedule with the postreco step """
1776  self.loadAndRemember(self.POSTRECODefaultCFF)
1777  self.scheduleSequence('postreco_generator','postreco_step')
1778  return
1779 
1780 
1781  def prepare_VALIDATION(self, sequence = 'validation'):
1782  print sequence,"in preparing validation"
1783  self.loadDefaultOrSpecifiedCFF(sequence,self.VALIDATIONDefaultCFF)
1784  from Validation.Configuration.autoValidation import autoValidation
1785  #in case VALIDATION:something:somethingelse -> something,somethingelse
1786  sequence=sequence.split('.')[-1]
1787  if sequence.find(',')!=-1:
1788  prevalSeqName=sequence.split(',')[0].split('+')
1789  valSeqName=sequence.split(',')[1].split('+')
1790  self.expandMapping(prevalSeqName,autoValidation,index=0)
1791  self.expandMapping(valSeqName,autoValidation,index=1)
1792  else:
1793  if '@' in sequence:
1794  prevalSeqName=sequence.split('+')
1795  valSeqName=sequence.split('+')
1796  self.expandMapping(prevalSeqName,autoValidation,index=0)
1797  self.expandMapping(valSeqName,autoValidation,index=1)
1798  else:
1799  postfix=''
1800  if sequence:
1801  postfix='_'+sequence
1802  prevalSeqName=['prevalidation'+postfix]
1803  valSeqName=['validation'+postfix]
1804  if not hasattr(self.process,valSeqName[0]):
1805  prevalSeqName=['']
1806  valSeqName=[sequence]
1807 
1808  def NFI(index):
1809  ##name from index, required to keep backward compatibility
1810  if index==0:
1811  return ''
1812  else:
1813  return '%s'%index
1814 
1815 
1816  #rename the HLT process in validation steps
1817  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1818  for s in valSeqName+prevalSeqName:
1819  if s:
1820  self.renameHLTprocessInSequence(s)
1821  for (i,s) in enumerate(prevalSeqName):
1822  if s:
1823  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1824  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1825 
1826  for (i,s) in enumerate(valSeqName):
1827  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1828  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1829 
1830  #needed in case the miniAODValidation sequence is run starting from AODSIM
1831  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1832  return
1833 
1834  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1835  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1836  self._options.restoreRNDSeeds=True
1837 
1838  if not 'DIGI' in self.stepMap and not self._options.fast:
1839  self.executeAndRemember("process.mix.playback = True")
1840  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1841  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1842  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1843 
1844  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1845  #will get in the schedule, smoothly
1846  for (i,s) in enumerate(valSeqName):
1847  getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1848 
1849  return
1850 
1851 
1852  class MassSearchReplaceProcessNameVisitor(object):
1853  """Visitor that travels within a cms.Sequence, looks for a parameter and replaces its value
1854  It will climb down within PSets, VPSets and VInputTags to find its target"""
1855  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1856  self._paramReplace = paramReplace
1857  self._paramSearch = paramSearch
1858  self._verbose = verbose
1859  self._whitelist = whitelist
1860 
1861  def doIt(self,pset,base):
1862  if isinstance(pset, cms._Parameterizable):
1863  for name in pset.parameters_().keys():
1864  # skip whitelisted parameters
1865  if name in self._whitelist:
1866  continue
1867  # if I use pset.parameters_().items() I get copies of the parameter values
1868  # so I can't modify the nested pset
1869  value = getattr(pset,name)
1870  type = value.pythonTypeName()
1871  if type in ('cms.PSet', 'cms.untracked.PSet'):
1872  self.doIt(value,base+"."+name)
1873  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1874  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1875  elif type in ('cms.string', 'cms.untracked.string'):
1876  if value.value() == self._paramSearch:
1877  if self._verbose: print "set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace)
1878  setattr(pset, name,self._paramReplace)
1879  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1880  for (i,n) in enumerate(value):
1881  if not isinstance(n, cms.InputTag):
1882  n=cms.InputTag(n)
1883  if n.processName == self._paramSearch:
1884  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1885  if self._verbose:print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace)
1886  setattr(n,"processName",self._paramReplace)
1887  value[i]=n
1888  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1889  for (i,n) in enumerate(value):
1890  if n==self._paramSearch:
1891  getattr(pset,name)[i]=self._paramReplace
1892  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1893  if value.processName == self._paramSearch:
1894  if self._verbose: print "set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace)
1895  setattr(getattr(pset, name),"processName",self._paramReplace)
1896 
1897  def enter(self,visitee):
1898  label = ''
1899  try:
1900  label = visitee.label()
1901  except AttributeError:
1902  label = '<Module not in a Process>'
1903  except:
1904  label = 'other exception'
1905  self.doIt(visitee, label)
1906 
1907  def leave(self,visitee):
1908  pass
1909 
1910  #visit a sequence to replace all input tags
1911  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1912  print "Replacing all InputTag %s => %s"%(oldT,newT)
1913  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1914  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1915  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1916  if not loadMe in self.additionalCommands:
1917  self.additionalCommands.append(loadMe)
1918  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1919 
1920  #change the process name used to address HLT results in any sequence
1921  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1922  if self._options.hltProcess:
1923  proc=self._options.hltProcess
1924  else:
1925  proc=self.process.name_()
1926  if proc==HLTprocess: return
1927  # look up all modules in the given sequence
1928  print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1929  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1930  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1931  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1932  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1933 
1934 
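# Minimal standalone sketch (no CMSSW needed) of what the visitor used by
# renameHLTprocessInSequence does: any InputTag-like value whose process name
# matches the old HLT process name is rewritten. FakeInputTag is a stand-in for
# cms.InputTag, used only for illustration.
class FakeInputTag(object):
    def __init__(self, label, instance='', processName=''):
        self.label = label
        self.instance = instance
        self.processName = processName

def rename_process(tags, old='HLT', new='reHLT'):
    for tag in tags:
        if tag.processName == old:
            tag.processName = new
    return tags

tags = [FakeInputTag('hltTriggerSummaryAOD', processName='HLT'),
        FakeInputTag('genParticles')]
rename_process(tags)
# tags[0].processName -> 'reHLT'; tags[1] is untouched (empty process name)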
1935  def expandMapping(self,seqList,mapping,index=None):
1936  maxLevel=20
1937  level=0
1938  while '@' in repr(seqList) and level<maxLevel:
1939  level+=1
1940  for specifiedCommand in seqList:
1941  if specifiedCommand.startswith('@'):
1942  location=specifiedCommand[1:]
1943  if not location in mapping:
1944  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1945  mappedTo=mapping[location]
1946  if index!=None:
1947  mappedTo=mappedTo[index]
1948  seqList.remove(specifiedCommand)
1949  seqList.extend(mappedTo.split('+'))
1950  break
1951  if level==maxLevel:
1952  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1953 
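# Minimal standalone sketch of expandMapping above, with a made-up mapping;
# '@' entries are replaced in place, recursively, until none remain (the index
# argument of the real method is omitted here).
def expand(seqList, mapping, maxLevel=20):
    level = 0
    while '@' in repr(seqList) and level < maxLevel:
        level += 1
        for item in seqList:
            if item.startswith('@'):
                seqList.remove(item)
                seqList.extend(mapping[item[1:]].split('+'))
                break
    return seqList

# expand(['@all'], {'common': 'ecalDQM+hcalDQM', 'all': '@common+trackerDQM'})
#   -> ['trackerDQM', 'ecalDQM', 'hcalDQM']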
1954  def prepare_DQM(self, sequence = 'DQMOffline'):
1955  # this one needs replacement
1956 
1957  self.loadDefaultOrSpecifiedCFF(sequence,self.DQMOFFLINEDefaultCFF)
1958  sequenceList=sequence.split('.')[-1].split('+')
1959  from DQMOffline.Configuration.autoDQM import autoDQM
1960  self.expandMapping(sequenceList,autoDQM,index=0)
1961 
1962  if len(set(sequenceList))!=len(sequenceList):
1963  sequenceList=list(set(sequenceList))
1964  print "Duplicate entries for DQM, using",sequenceList
1965  pathName='dqmoffline_step'
1966 
1967  for (i,sequence) in enumerate(sequenceList):
1968  if (i!=0):
1969  pathName='dqmoffline_%d_step'%(i)
1970 
1971  if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
1972  self.renameHLTprocessInSequence(sequence)
1973 
1974  # if both HLT and DQM are run in the same process, schedule [HLT]DQM in an EndPath
1975  if 'HLT' in self.stepMap.keys():
1976  # need to put [HLT]DQM in an EndPath, to access the HLT trigger results
1977  setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1978  else:
1979  # schedule DQM as a standard Path
1980  setattr(self.process,pathName, cms.Path( getattr(self.process, sequence) ) )
1981  self.schedule.append(getattr(self.process,pathName))
1982 
1983 
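# Minimal standalone sketch of the DQM path naming above: the first expanded
# sequence keeps the plain step name, subsequent ones get an index (sequence
# names are illustrative).
sequences = ['DQMOfflineCommon', 'HLTMonitoring', 'DQMOfflineMuon']
path_names = ['dqmoffline_step' if i == 0 else 'dqmoffline_%d_step' % i
              for i in range(len(sequences))]
# -> ['dqmoffline_step', 'dqmoffline_1_step', 'dqmoffline_2_step']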
1984  def prepare_HARVESTING(self, sequence = None):
1985  """ Enrich the process with harvesting step """
1986  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
1987  self.loadAndRemember(self.DQMSaverCFF)
1988 
1989  harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
1990  sequence = sequence.split('.')[-1]
1991 
1992  # decide which HARVESTING paths to use
1993  harvestingList = sequence.split("+")
1994  from DQMOffline.Configuration.autoDQM import autoDQM
1995  from Validation.Configuration.autoValidation import autoValidation
1996  import copy
1997  combined_mapping = copy.deepcopy( autoDQM )
1998  combined_mapping.update( autoValidation )
1999  self.expandMapping(harvestingList,combined_mapping,index=-1)
2000 
2001  if len(set(harvestingList))!=len(harvestingList):
2002  harvestingList=list(set(harvestingList))
2003  print "Duplicate entries for HARVESTING, using",harvestingList
2004 
2005  for name in harvestingList:
2006  if not name in harvestingConfig.__dict__:
2007  print name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
2008  continue
2009  harvestingstream = getattr(harvestingConfig,name)
2010  if isinstance(harvestingstream,cms.Path):
2011  self.schedule.append(harvestingstream)
2012  self.blacklist_paths.append(harvestingstream)
2013  if isinstance(harvestingstream,cms.Sequence):
2014  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2015  self.schedule.append(getattr(self.process,name+"_step"))
2016 
2017  self.scheduleSequence('DQMSaver','dqmsave_step')
2018  return
2019 
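# Minimal standalone sketch of the mapping merge used in prepare_HARVESTING
# above; the entries are made up, but as in autoDQM/autoValidation each value
# is a list whose last element (index -1) is the harvesting sequence.
import copy
autoDQM_sketch = {'common': ['DQMOfflineCommon', 'DQMHarvestCommon']}
autoValidation_sketch = {'baseValidation': ['prevalidation', 'validation', 'postValidation']}
combined = copy.deepcopy(autoDQM_sketch)
combined.update(autoValidation_sketch)
# '@common' in a HARVESTING step expands (with index=-1) to 'DQMHarvestCommon',
# '@baseValidation' expands to 'postValidation'.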
2020  def prepare_ALCAHARVEST(self, sequence = None):
2021  """ Enrich the process with AlCaHarvesting step """
2022  harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
2023  sequence=sequence.split(".")[-1]
2024 
2025  # decide which AlcaHARVESTING paths to use
2026  harvestingList = sequence.split("+")
2027 
2028 
2029 
2030  from Configuration.AlCa.autoPCL import autoPCL
2031  self.expandMapping(harvestingList,autoPCL)
2032 
2033  for name in harvestingConfig.__dict__:
2034  harvestingstream = getattr(harvestingConfig,name)
2035  if name in harvestingList and isinstance(harvestingstream,cms.Path):
2036  self.schedule.append(harvestingstream)
2037  self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
2038  self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
2039  harvestingList.remove(name)
2040  # append the common part at the end of the sequence
2041  lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
2042  self.schedule.append(lastStep)
2043 
2044  if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
2045  print "The following harvesting could not be found : ", harvestingList
2046  raise Exception("The following harvesting could not be found : "+str(harvestingList))
2047 
2048 
2049 
2050  def prepare_ENDJOB(self, sequence = 'endOfProcess'):
2051  self.loadDefaultOrSpecifiedCFF(sequence,self.ENDJOBDefaultCFF)
2052  self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
2053  return
2054 
2056  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2057  self.schedule.append(self.process.reconstruction)
2058 
2059 
2060  def build_production_info(self, evt_type, evtnumber):
2061  """ Add useful info for the production. """
2062  self.process.configurationMetadata=cms.untracked.PSet\
2063  (version=cms.untracked.string("$Revision: 1.19 $"),
2064  name=cms.untracked.string("Applications"),
2065  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2066  )
2067 
2068  self.addedObjects.append(("Production Info","configurationMetadata"))
2069 
2070 
2071  def prepare(self, doChecking = False):
2072  """ Prepare the configuration string and add missing pieces."""
2073 
2074  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2075  self.addMaxEvents()
2076  if self.with_input:
2077  self.addSource()
2078  self.addStandardSequences()
2079  ##adding standard sequences might change the inputEventContent option and therefore needs to be finalized after
2080  self.completeInputCommand()
2081  self.addConditions()
2082 
2083 
2084  outputModuleCfgCode=""
2085  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2086  outputModuleCfgCode=self.addOutput()
2087 
2088  self.addCommon()
2089 
2090  self.pythonCfgCode = "# Auto generated configuration file\n"
2091  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2092  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2093  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2094  if hasattr(self._options,"era") and self._options.era :
2095  self.pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
2096  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"'" # Start of the line, finished after the loop
2097  # Multiple eras can be specified in a comma-separated list
2098  for requestedEra in self._options.era.split(",") :
2099  self.pythonCfgCode += ",eras."+requestedEra
2100  self.pythonCfgCode += ")\n\n" # end of the line
2101  else :
2102  self.pythonCfgCode += "process = cms.Process('"+self.process.name_()+"')\n\n"
2103 
2104  self.pythonCfgCode += "# import of standard configurations\n"
2105  for module in self.imports:
2106  self.pythonCfgCode += ("process.load('"+module+"')\n")
2107 
2108  # production info
2109  if not hasattr(self.process,"configurationMetadata"):
2110  self.build_production_info(self._options.evt_type, self._options.number)
2111  else:
2112  #the PSet was added via a load
2113  self.addedObjects.append(("Production Info","configurationMetadata"))
2114 
2115  self.pythonCfgCode +="\n"
2116  for comment,object in self.addedObjects:
2117  if comment!="":
2118  self.pythonCfgCode += "\n# "+comment+"\n"
2119  self.pythonCfgCode += dumpPython(self.process,object)
2120 
2121  # dump the output definition
2122  self.pythonCfgCode += "\n# Output definition\n"
2123  self.pythonCfgCode += outputModuleCfgCode
2124 
2125  # dump all additional outputs (e.g. alca or skim streams)
2126  self.pythonCfgCode += "\n# Additional output definition\n"
2127  #I do not understand why the keys are not normally ordered.
2128  nl=self.additionalOutputs.keys()
2129  nl.sort()
2130  for name in nl:
2131  output = self.additionalOutputs[name]
2132  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2133  tmpOut = cms.EndPath(output)
2134  setattr(self.process,name+'OutPath',tmpOut)
2135  self.schedule.append(tmpOut)
2136 
2137  # dump all additional commands
2138  self.pythonCfgCode += "\n# Other statements\n"
2139  for command in self.additionalCommands:
2140  self.pythonCfgCode += command + "\n"
2141 
2142  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2143  for object in self._options.inlineObjets.split(','):
2144  if not object:
2145  continue
2146  if not hasattr(self.process,object):
2147  print 'cannot inline -'+object+'- : not known'
2148  else:
2149  self.pythonCfgCode +='\n'
2150  self.pythonCfgCode +=dumpPython(self.process,object)
2151 
2152  # dump all paths
2153  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2154  for path in self.process.paths:
2155  if getattr(self.process,path) not in self.blacklist_paths:
2156  self.pythonCfgCode += dumpPython(self.process,path)
2157 
2158  for endpath in self.process.endpaths:
2159  if getattr(self.process,endpath) not in self.blacklist_paths:
2160  self.pythonCfgCode += dumpPython(self.process,endpath)
2161 
2162  # dump the schedule
2163  self.pythonCfgCode += "\n# Schedule definition\n"
2164  result = "process.schedule = cms.Schedule("
2165 
2166  # handling of the schedule
2167  self.process.schedule = cms.Schedule()
2168  for item in self.schedule:
2169  if not isinstance(item, cms.Schedule):
2170  self.process.schedule.append(item)
2171  else:
2172  self.process.schedule.extend(item)
2173 
2174  if hasattr(self.process,"HLTSchedule"):
2175  beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2176  afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2177  pathNames = ['process.'+p.label_() for p in beforeHLT]
2178  result += ','.join(pathNames)+')\n'
2179  result += 'process.schedule.extend(process.HLTSchedule)\n'
2180  pathNames = ['process.'+p.label_() for p in afterHLT]
2181  result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
2182  else:
2183  pathNames = ['process.'+p.label_() for p in self.schedule]
2184  result ='process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2185 
2186  self.pythonCfgCode += result
2187 
2188  if self._options.nThreads != "1":
2189  self.pythonCfgCode +="\n"
2190  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2191  self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
2192  self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"
2193  #repacked version
2194  if self._options.isRepacked:
2195  self.pythonCfgCode +="\n"
2196  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2197  self.pythonCfgCode +="MassReplaceInputTag(process)\n"
2198  MassReplaceInputTag(self.process)
2199 
2200  # special treatment in case of production filter sequence 2/2
2201  if self.productionFilterSequence:
2202  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2203  self.pythonCfgCode +='for path in process.paths:\n'
2204  if len(self.conditionalPaths):
2205  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2206  if len(self.excludedPaths):
2207  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2208  self.pythonCfgCode +='\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2209  pfs = getattr(self.process,self.productionFilterSequence)
2210  for path in self.process.paths:
2211  if not path in self.conditionalPaths: continue
2212  if path in self.excludedPaths: continue
2213  getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2214 
2215 
2216  # dump customise fragment
2217  self.pythonCfgCode += self.addCustomise()
2218 
2219  if self._options.runUnscheduled:
2220  # prune and delete paths
2221  #this is not supporting the blacklist at this point since I do not understand it
2222  self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
2223  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
2224  self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
2225 
2226  from FWCore.ParameterSet.Utilities import convertToUnscheduled
2227  self.process=convertToUnscheduled(self.process)
2228 
2229  #now add the unscheduled stuff
2230  for module in self.importsUnsch:
2231  self.process.load(module)
2232  self.pythonCfgCode += ("process.load('"+module+"')\n")
2233 
2234  #and clean the unscheduled stuff
2235  self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
2236  self.pythonCfgCode+="process=cleanUnscheduled(process)\n"
2237 
2238  from FWCore.ParameterSet.Utilities import cleanUnscheduled
2239  self.process=cleanUnscheduled(self.process)
2240 
2241 
2242  self.pythonCfgCode += self.addCustomise(1)
2243 
2244 
2245  # make the .io file
2246 
2247  if self._options.io:
2248  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2249  if not self._options.io.endswith('.io'): self._options.io+='.io'
2250  io=open(self._options.io,'w')
2251  ioJson={}
2252  if hasattr(self.process.source,"fileNames"):
2253  if len(self.process.source.fileNames.value()):
2254  ioJson['primary']=self.process.source.fileNames.value()
2255  if hasattr(self.process.source,"secondaryFileNames"):
2256  if len(self.process.source.secondaryFileNames.value()):
2257  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2258  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2259  ioJson['pileup']=self._options.pileup_input[4:]
2260  for (o,om) in self.process.outputModules_().items():
2261  ioJson[o]=om.fileName.value()
2262  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2263  if self.productionFilterSequence:
2264  ioJson['filter']=self.productionFilterSequence
2265  import json
2266  io.write(json.dumps(ioJson))
2267  return
2268 
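# Minimal standalone sketch of the kind of .io summary written above; every
# value here is illustrative, the real content comes from the configured
# source, output modules, GlobalTag and production filter sequence.
import json
ioJson = {
    'primary': ['/store/mc/FakeCampaign/sample/GEN-SIM/fake_1.root'],
    'pileup': '/RelValMinBias/FakeCampaign/GEN-SIM',
    'RECOSIMoutput': 'output.root',
    'GT': 'FAKE_GLOBALTAG::All',
    'filter': 'ProductionFilterSequence',
}
summary = json.dumps(ioJson)  # this string is what ends up in the .io file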