CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 from __future__ import print_function
4 __version__ = "$Revision: 1.19 $"
5 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
6 
7 import FWCore.ParameterSet.Config as cms
8 from FWCore.ParameterSet.Modules import _Module
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import hashlib
14 import sys
15 import re
16 import collections
17 from subprocess import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes as DictTypes
class Options:
    """Bare attribute container used to carry cmsDriver option values."""
21 
# the canonical defaults: one attribute per cmsDriver command-line option
defaultOptions = Options()
# sample type / mixing
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# event counts: -1 means "process all events"
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# input specification (plain files, file lists, DAS queries)
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# customisation hooks
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# output content / data tiers
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False
# NOTE: import kept mid-block to preserve the original import-time ordering
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# run-dependent MC configuration (see addSource)
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# framework concurrency settings (kept as strings, parsed downstream)
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
93 
94 # some helper routines
def dumpPython(process,name):
    """Return python source re-creating attribute `name` of `process`.

    Paths/EndPaths/Sequences are dumped without a trailing newline (their
    dumpPython already ends in one); everything else gets a trailing "\n".
    The original had three branches of which two were byte-identical;
    collapsed here without changing any output.
    """
    theObject = getattr(process,name)
    dump = "process."+name+" = " + theObject.dumpPython()
    if isinstance(theObject,(cms.Path,cms.EndPath,cms.Sequence)):
        return dump
    return dump+"\n"
def filesFromList(fileName,s=None):
    """Parse a text file listing input ROOT files.

    Each line may hold one file (primary only) or two whitespace-separated
    files (primary + its parent/secondary). Lines without ".root" are
    ignored. Duplicates are dropped while preserving first-seen order.

    If `s` (a cms source) is given, its fileNames/secondaryFileNames are
    created or extended in place.

    Returns (prim, sec). Raises Exception when no primary file is found.
    """
    prim=[]
    sec=[]
    # use a context manager so the file handle is always closed
    # (the original leaked it); unused `import os` removed
    with open(fileName,'r') as listFile:
        for line in listFile:
            if line.count(".root")>=2:
                # two files solution: primary + secondary on one line
                entries=line.replace("\n","").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif (line.find(".root")!=-1):
                entry=line.replace("\n","")
                prim.append(entry)
    # remove any duplicates but keep the order
    file_seen = set()
    prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
    file_seen = set()
    sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
    if s:
        # cms only needed when attaching to a source object; importing it
        # lazily lets the pure parsing path run outside a CMSSW environment
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(prim)==0:
        # fixed grammar of the original message ("are not files")
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
138 
def filesFromDASQuery(query,option="",s=None):
    """Run a DAS query via `dasgoclient` and collect the resulting files.

    Retries the query up to 3 times (sleeping 100 s between attempts) until
    the client exits with status 0. Output lines are parsed exactly like in
    filesFromList: one or two ".root" names per line. Results are
    de-duplicated and sorted. If `s` (a cms source) is given, its
    fileNames/secondaryFileNames are created or extended in place.

    Returns (prim, sec).

    NOTE(review): if all 3 attempts fail, parsing still proceeds on the
    last attempt's (possibly empty) output — presumably intentional
    best-effort behavior; confirm before changing.
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    # retry loop: eC is the dasgoclient exit status (nonzero = failure)
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        # shell=True: query string is interpolated into a shell command
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates (sorted: DAS output order is not meaningful)
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
186 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key of `listOfKeys` present in `dict`.

    The matched key is removed from `dict` (so leftover keys can later be
    reported as unused --output options). If no key matches, `opt` is
    returned when provided, otherwise an Exception is raised.

    NOTE: the parameter name `dict` shadows the builtin; it is kept for
    backward compatibility with existing keyword callers.
    """
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    # identity check instead of `!=`: `opt` may legitimately be falsy ('')
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
197 
    """The main building routines """

    def __init__(self, options, process = None, with_output = False, with_input = False ):
        """options taken from old cmsDriver and optparse """

        # full output path = output directory + file name
        options.outfile_name = options.dirout+options.fileout

        self._options = options

        # --data and --mc are mutually exclusive
        if self._options.isData and options.isMC:
            raise Exception("ERROR: You may specify only --data or --mc, not both")
        #if not self._options.conditions:
        #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

        # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
        # (anyOf pops the tier key from each output dict while checking it)
        if 'ENDJOB' in self._options.step:
            if (hasattr(self._options,"outputDefinition") and \
                self._options.outputDefinition != '' and \
                any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
               (hasattr(self._options,"datatier") and \
                self._options.datatier and \
                'DQMIO' in self._options.datatier):
                print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
                self._options.step=self._options.step.replace(',ENDJOB','')



        # what steps are provided by this class?
        # every prepare_<STEP> method advertises step <STEP>
        stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
        self.stepMap={}
        self.stepKeys=[]
        # step syntax: NAME, NAME:SEQ1+SEQ2, or NAME:ALT:SEQ1+SEQ2
        # 're'-prefixed steps (e.g. reRECO) are accepted as re-runs of a step
        for step in self._options.step.split(","):
            if step=='': continue
            stepParts = step.split(":")
            stepName = stepParts[0]
            if stepName not in stepList and not stepName.startswith('re'):
                raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
            if len(stepParts)==1:
                self.stepMap[stepName]=""
            elif len(stepParts)==2:
                self.stepMap[stepName]=stepParts[1].split('+')
            elif len(stepParts)==3:
                self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
            else:
                raise ValueError(f"Step definition {step} invalid")
            self.stepKeys.append(stepName)

        #print(f"map of steps is: {self.stepMap}")

        self.with_output = with_output
        self.process=process

        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        self.imports = []        # cff files loaded so far (for the dump)
        self.create_process()
        self.define_Configs()
        self.schedule = list()

        # we are doing three things here:
        # creating a process to catch errors
        # building the code to re-create the process

        # TODO: maybe a list of to be dumped objects would help as well
        self.blacklist_paths = []
        self.addedObjects = []
275 
276  def profileOptions(self):
277  """
278  addIgProfService
279  Function to add the igprof profile service so that you can dump in the middle
280  of the run.
281  """
282  profileOpts = self._options.profile.split(':')
283  profilerStart = 1
284  profilerInterval = 100
285  profilerFormat = None
286  profilerJobFormat = None
287 
288  if len(profileOpts):
289  #type, given as first argument is unused here
290  profileOpts.pop(0)
291  if len(profileOpts):
292  startEvent = profileOpts.pop(0)
293  if not startEvent.isdigit():
294  raise Exception("%s is not a number" % startEvent)
295  profilerStart = int(startEvent)
296  if len(profileOpts):
297  eventInterval = profileOpts.pop(0)
298  if not eventInterval.isdigit():
299  raise Exception("%s is not a number" % eventInterval)
300  profilerInterval = int(eventInterval)
301  if len(profileOpts):
302  profilerFormat = profileOpts.pop(0)
303 
304 
305  if not profilerFormat:
306  profilerFormat = "%s___%s___%%I.gz" % (
307  self._options.evt_type.replace("_cfi", ""),
308  hashlib.md5(
309  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
310  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
311  ).hexdigest()
312  )
313  if not profilerJobFormat and profilerFormat.endswith(".gz"):
314  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
315  elif not profilerJobFormat:
316  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
317 
318  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
319 
320  def load(self,includeFile):
321  includeFile = includeFile.replace('/','.')
322  self.process.load(includeFile)
323  return sys.modules[includeFile]
324 
325  def loadAndRemember(self, includeFile):
326  """helper routine to load am memorize imports"""
327  # we could make the imports a on-the-fly data method of the process instance itself
328  # not sure if the latter is a good idea
329  includeFile = includeFile.replace('/','.')
330  self.imports.append(includeFile)
331  self.process.load(includeFile)
332  return sys.modules[includeFile]
333 
334  def executeAndRemember(self, command):
335  """helper routine to remember replace statements"""
336  self.additionalCommands.append(command)
337  if not command.strip().startswith("#"):
338  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
339  import re
340  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
341  #exec(command.replace("process.","self.process."))
342 
343  def addCommon(self):
344  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
345  self.process.options.Rethrow = ['ProductNotFound']
346  self.process.options.fileMode = 'FULLMERGE'
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many evts will be processed"""
373  self.process.maxEvents.input = int(self._options.number)
374  if self._options.number_out:
375  self.process.maxEvents.output = int(self._options.number_out)
376  self.addedObjects.append(("","maxEvents"))
377 
    def addSource(self):
        """Here the source is built. Priority: file, generator.

        Builds self.process.source from --filein/--dasquery according to
        --filetype (EDM/DAT/LHE/DQM/DQMDAQ), applies input commands, lumi
        masks, and the run-dependent-MC source modifications.
        """
        self.addedObjects.append(("Input source","source"))

        # helper: fill the already-created source from the --filein entries
        # (plain names, "filelist:<file>", or "dbs:/das:<dataset>")
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print("entry",entry)
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        # create the source module matching the requested input file type
        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print('LHE input from article ',article)
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                        #check first if list of LHE files is loaded (not empty)
                        if len(line)<2:
                            print('Issue to load LHE files, please check and try again.')
                            sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print('Issue with empty filename, but can pass line check')
                        sys.exit(-1)
                    if len(args)>2:
                        # optional third field: number of events to skip
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)

            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)

            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")


            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # a DAS query replaces the source wholesale
        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")


        ##drop LHEXMLStringProduct on input to save memory if appropriate
        if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        # generator jobs without an input file run from an EmptySource
        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    # scenario given as a module name: import it dynamically
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        # modify source in case of run-dependent MC (Run-3 method)
        if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
            if self._options.runsAndWeightsForMCIntegerWeights:
                self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                    self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
                else:
                    self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

        if self.runsAndWeightsInt:
            if not self._options.relval:
                raise Exception("--relval option required when using --runsAndWeightsInt")
            if 'DATAMIX' in self._options.step:
                # map lumi sections to run numbers so premixing sees run-dependent conditions
                from SimGeneral.Configuration.LumiToRun import lumi_to_run
                total_events, events_per_job = self._options.relval.split(',')
                lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
                self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

        return
529 
    def addOutput(self):
        """ Add output module to the process.

        Two modes:
        * --output given: a list of dicts fully describes each output
          module (tier, event content, filter/select/label/file names).
        * otherwise: one output module per (eventcontent, datatier) pair.

        Output modules and their EndPaths are attached to the process and
        appended to self.schedule; returns the python source snippet
        re-creating the modules for the dumped config.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print("--datatier & --eventcontent options ignored")

            #new output convention with a list of dict
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
                ## event content
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role: pick the first
                # progressively-more-specific name not already taken
                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
                # first output keeps the nominal file name, later ones get a tier suffix
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # anyOf popped every recognized key; anything left is a typo
                if len(outDefDict):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName))
                                          )
                # default event selection when none was requested explicitly
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if addAlCaSelects:
                    # combined ALCARECO stream selects on every AlCa path
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                # optionally reference the event content symbolically in the dump
                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


            # end of the --output options model
            return result

        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            ## NANOEDM streams write the plain NANO event content
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            # first output keeps the nominal file name, later ones get a stream suffix
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                                                   )
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
710 
def addStandardSequences(self):
    """
    Add selected standard sequences to the process:
    the pile-up/mixing configuration, the geometry and magnetic-field cffs,
    then dispatch every requested step to its prepare_<STEP> method, and
    finally wire up random-number-state restoration if requested.
    """
    # load the pile up file
    if self._options.pileup:
        # the part before the first comma names the scenario; the rest (optional) is a dict of overrides
        pileupSpec=self._options.pileup.split(',')[0]

        # Does the requested pile-up scenario exist?
        from Configuration.StandardSequences.Mixing import Mixing,defineMixing
        if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
            message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
            raise Exception(message)

        # Put mixing parameters in a dictionary
        if '.' in pileupSpec:
            # a dotted spec is taken as a python module path to load directly
            mixingDict={'file':pileupSpec}
        elif pileupSpec.startswith('file:'):
            # local file: strip the 'file:' prefix, keep the path
            mixingDict={'file':pileupSpec[5:]}
        else:
            import copy
            # shallow copy so the overrides below do not modify the shared Mixing table
            mixingDict=copy.copy(Mixing[pileupSpec])
        if len(self._options.pileup.split(','))>1:
            # NOTE(review): eval of command-line text — assumed to come from a trusted operator
            mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

        # Load the pu cfg file corresponding to the requested pu scenario
        if 'file:' in pileupSpec:
            #the file is local
            self.process.load(mixingDict['file'])
            print("inlining mixing module configuration")
            self._options.inlineObjets+=',mix'
        else:
            self.loadAndRemember(mixingDict['file'])

        mixingDict.pop('file')
        if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
            if self._options.pileup_input:
                # resolve the pileup input spec (das/dbs query, file list, or comma list) into filenames
                if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                    mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                elif self._options.pileup_input.startswith("filelist:"):
                    mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                else:
                    mixingDict['F']=self._options.pileup_input.split(',')
            specialization=defineMixing(mixingDict)
            for command in specialization:
                self.executeAndRemember(command)
            # defineMixing is expected to consume every key; leftovers are a user error
            if len(mixingDict)!=0:
                raise Exception('unused mixing specification: '+mixingDict.keys().__str__())


    # load the geometry file
    try:
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                # NOTE(review): the doxygen listing appears to have dropped a line here
                # (likely the SimGeometryCFF load) — verify against version control
                if self.geometryDBLabel:
                    # guarded with hasattr so the command also works when the DB source is absent
                    self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
                    self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))

    except ImportError:
        print("Geometry option",self._options.geometry,"unknown.")
        raise

    if len(self.stepMap):
        self.loadAndRemember(self.magFieldCFF)

    # dispatch every requested step to its prepare_<STEP> method
    for stepName in self.stepKeys:
        stepSpec = self.stepMap[stepName]
        print("Step:", stepName,"Spec:",stepSpec)
        if stepName.startswith('re'):
            # 're'-prefixed steps re-run an existing step: keep its products on input
            # unless the user asked to drop them
            if stepName[2:] not in self._options.donotDropOnInput:
                self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
            stepName=stepName[2:]
        if stepSpec=="":
            getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
        elif isinstance(stepSpec, list):
            getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
        elif isinstance(stepSpec, tuple):
            # tuple spec: (sequences, cff) -> "cff.a+b+c"
            getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
        else:
            raise ValueError("Invalid step definition")

    if self._options.restoreRNDSeeds!=False:
        #it is either True, or a process name
        if self._options.restoreRNDSeeds==True:
            self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
        else:
            self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
        if self._options.inputEventContent or self._options.inputCommands:
            # also keep the stored random state on input
            if self._options.inputCommands:
                self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
            else:
                self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
806 
807 
def completeInputCommand(self):
    """Extend process.source.inputCommands from the --inputEventContent names.

    For every event-content name given, both its outputCommands and
    inputCommands are copied onto the source; duplicate 'drop *' entries
    beyond the first are then pruned.
    """
    if not self._options.inputEventContent:
        return

    import copy

    def dropSecondDropStar(iec):
        # keep only the first occurrence of 'drop *' in the list
        # (kept verbatim: the mutate-while-iterating semantics are part of the behavior)
        count=0
        for item in iec:
            if item=='drop *':
                if count!=0:
                    iec.remove(item)
                count+=1

    source = self.process.source
    if not hasattr(source, 'inputCommands'):
        source.inputCommands = cms.untracked.vstring()

    for evct in self._options.inputEventContent.split(','):
        if evct == '':
            continue
        theEventContent = getattr(self.process, evct + "EventContent")
        # copy output commands first, then input commands, as the original did
        for attrName in ('outputCommands', 'inputCommands'):
            if hasattr(theEventContent, attrName):
                source.inputCommands.extend(copy.copy(getattr(theEventContent, attrName)))

    dropSecondDropStar(source.inputCommands)

    if not self._options.dropDescendant:
        source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
    return
837 
def addConditions(self):
    """Add conditions to the process: set up the GlobalTag from --conditions."""
    opts = self._options
    if not opts.conditions:
        return

    # tolerate (but warn about) the deprecated FrontierConditions_GlobalTag spelling
    if 'FrontierConditions_GlobalTag' in opts.conditions:
        print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
        opts.conditions = opts.conditions.replace("FrontierConditions_GlobalTag,", '')

    from Configuration.AlCa.GlobalTag import GlobalTag
    self.process.GlobalTag = GlobalTag(self.process.GlobalTag, opts.conditions, opts.custom_conditions)
    # record the equivalent python for the dumped configuration
    self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
    self.additionalCommands.append(
        'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(opts.conditions), repr(opts.custom_conditions)))
851 
852 
def addCustomise(self,unsch=0):
    """Include the customise code.

    Collects the customisation functions from --customise (unsch==0) or the
    unscheduled variant (unsch!=0), imports each module, applies every
    function to self.process, and returns the python snippet that reproduces
    those calls in the dumped configuration.

    Raises Exception for malformed specs, duplicate functions, or a missing
    function in a customisation module.
    """
    custOpt=[]
    if unsch==0:
        for c in self._options.customisation_file:
            custOpt.extend(c.split(","))
    else:
        for c in self._options.customisation_file_unsch:
            custOpt.extend(c.split(","))

    # map: customisation file -> list of function names to call from it
    custMap=DictTypes.SortedKeysDict()
    for opt in custOpt:
        if opt=='': continue
        if opt.count('.')>1:
            raise Exception("more than . in the specification:"+opt)
        fileName=opt.split('.')[0]
        if opt.count('.')==0: rest='customise'
        else:
            rest=opt.split('.')[1]
            if rest=='py': rest='customise' #catch the case of --customise file.py

        if fileName in custMap:
            custMap[fileName].extend(rest.split('+'))
        else:
            custMap[fileName]=rest.split('+')

    if len(custMap)==0:
        final_snippet='\n'
    else:
        final_snippet='\n# customisation of the process.\n'

    # a given function may only be applied once across all customisation files
    allFcn=[]
    for opt in custMap:
        allFcn.extend(custMap[opt])
    for fcn in allFcn:
        if allFcn.count(fcn)!=1:
            raise Exception("cannot specify twice "+fcn+" as a customisation method")

    for f in custMap:
        # let python search for that package and do syntax checking at the same time
        packageName = f.replace(".py","").replace("/",".")
        __import__(packageName)
        package = sys.modules[packageName]

        # now ask the package for its definition and pick .py instead of .pyc
        customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

        final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
        if self._options.inline_custom:
            # fix: the python-2 'file' builtin does not exist in python 3;
            # use open() in a 'with' block (which also closes the handle)
            with open(customiseFile,'r') as customiseSource:
                for line in customiseSource:
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
        else:
            final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
        for fcn in custMap[f]:
            print("customising the process with",fcn,"from",f)
            if not hasattr(package,fcn):
                #bound to fail at run time
                raise Exception("config "+f+" has no function "+fcn)
            #execute the command
            self.process=getattr(package,fcn)(self.process)
            #and print it in the configuration
            final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
            final_snippet += "\nprocess = %s(process)\n"%(fcn,)

    if len(custMap)!=0:
        final_snippet += '\n# End of customisation functions\n'


    return final_snippet
925 
def addCustomiseCmdLine(self):
    """Apply raw python commands given via --customise_commands.

    Each literal '\\n'-separated command is executed on the process
    (via executeAndRemember) and echoed into the returned snippet for
    the dumped configuration.
    """
    final_snippet='\n# Customisation from command line\n'
    if self._options.customise_commands:
        # fix: dropped the unused legacy 'import string'
        for com in self._options.customise_commands.split('\\n'):
            com=com.lstrip()
            self.executeAndRemember(com)
            final_snippet +='\n'+com

    return final_snippet
936 
937  #----------------------------------------------------------------------------
938  # here the methods to define the python includes for each step or
939  # conditions
940  #----------------------------------------------------------------------------
def define_Configs(self):
    """Define the default cff file and default sequence for every step,
    then apply the scenario-, datatier- and option-driven overrides.
    Only attribute assignments and option normalisation happen here; the
    cffs are loaded later by the prepare_<STEP> methods.
    """
    if len(self.stepMap):
        self.loadAndRemember('Configuration/StandardSequences/Services_cff')
    if self._options.particleTable not in defaultOptions.particleTableList:
        print('Invalid particle table provided. Options are:')
        print(defaultOptions.particleTable)
        sys.exit(-1)
    else:
        if len(self.stepMap):
            self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

    self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

    # --- default cff file per step ---
    self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
    self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
    self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
    self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
    self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
    self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
    self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
    self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
    self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
    if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
    self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
    self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
    self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
    self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
    self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
    self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
    self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
    self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
    self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
    self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
    self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
    self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
    self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
    self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
    self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
    self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
    self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
    # data mixing (premixing) replaces a few of the defaults above
    if "DATAMIX" in self.stepMap.keys():
        self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

    # --- default sequence per step ---
    self.ALCADefaultSeq=None
    self.LHEDefaultSeq='externalLHEProducer'
    self.GENDefaultSeq='pgen'
    self.SIMDefaultSeq='psim'
    self.DIGIDefaultSeq='pdigi'
    self.DIGI2RAWDefaultSeq='DigiToRaw'
    self.HLTDefaultSeq='GRun'
    self.L1DefaultSeq=None
    self.RAW2DIGIDefaultSeq='RawToDigi'
    self.L1RecoDefaultSeq='L1Reco'
    self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
    # full 'reconstruction' only when RAW2DIGI runs in the same job (or FastSim)
    if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
        self.RECODefaultSeq='reconstruction'
    else:
        self.RECODefaultSeq='reconstruction_fromRECO'
    self.RECOSIMDefaultSeq='recosim'
    self.L1HwValDefaultSeq='L1HwVal'
    self.DQMDefaultSeq='DQMOffline'
    self.ENDJOBDefaultSeq='endOfProcess'
    self.REPACKDefaultSeq='DigiToRawRepack'
    self.PATDefaultSeq='miniAOD'
    self.PATGENDefaultSeq='miniGEN'
    #TODO: Check based of file input
    self.NANOGENDefaultSeq='nanogenSequence'
    self.NANODefaultSeq='nanoSequence'
    self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
    # NOTE(review): VtxSmearedDefaultKey is expected to be imported elsewhere in this file
    if not self._options.beamspot:
        self._options.beamspot=VtxSmearedDefaultKey

    # if its MC then change the raw2digi
    if self._options.isMC==True:
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
        self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        self.NANODefaultSeq='nanoSequenceMC'
    else:
        # no beamspot smearing on real data
        self._options.beamspot = None

    #patch for gen, due to backward incompatibility
    if 'reGEN' in self.stepMap:
        self.GENDefaultSeq='fixGenInfo'

    # --- scenario-driven overrides ---
    if self._options.scenario=='cosmics':
        self._options.pileup='Cosmics'
        self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
        self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
        if self._options.isMC==True:
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
        self.RECODefaultSeq='reconstructionCosmics'
        self.DQMDefaultSeq='DQMOfflineCosmics'

    if self._options.scenario=='HeavyIons':
        # NOTE(review): VtxSmearedHIDefaultKey is expected to be imported elsewhere in this file
        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedHIDefaultKey
        self.HLTDefaultSeq = 'HIon'
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
        self.VALIDATIONDefaultSeq=''
        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
        self.RECODefaultSeq='reconstructionHeavyIons'
        self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
        self.DQMDefaultSeq='DQMOfflineHeavyIons'
        self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
        if self._options.isMC==True:
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


    self.USERDefaultSeq='user'
    self.USERDefaultCFF=None
    # the magnetic field
    self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
    self.magFieldCFF = self.magFieldCFF.replace("__",'_')

    # the geometry
    self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
    simGeometry=''
    if self._options.fast:
        # FastSim geometry choice keyed on the conditions label
        if 'start' in self._options.conditions.lower():
            self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
        else:
            self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
    else:
        def inGeometryKeys(opt):
            # translate a geometry key through GeometryConf, pass unknown keys through
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]
            else:
                return opt

        geoms=self._options.geometry.split(',')
        if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
        if len(geoms)==2:
            #may specify the reco geometry
            if '/' in geoms[1] or '_cff' in geoms[1]:
                self.GeometryCFF=geoms[1]
            else:
                self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            # sim geometry taken from the database; remember the label for addStandardSequences
            self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel=geoms[0][3:]
            print("with DB:")
        else:
            if '/' in geoms[0] or '_cff' in geoms[0]:
                self.SimGeometryCFF=geoms[0]
            else:
                simGeometry=geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                else:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

    # synchronize the geometry configuration and the FullSimulation sequence to be used
    if simGeometry not in defaultOptions.geometryExtendedOptions:
        self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

    if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
        self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
        self._options.beamspot='NoSmear'

    # fastsim requires some changes to the default cff files and sequences
    if self._options.fast:
        self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
        self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
        self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
        self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
        self.NANODefaultSeq = 'nanoSequenceFS'
        self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"

    # Mixing
    if self._options.pileup=='default':
        from Configuration.StandardSequences.Mixing import MixingDefaultKey
        self._options.pileup=MixingDefaultKey


    #not driven by a default cff anymore
    if self._options.isData:
        self._options.pileup=None
1150 
1151 
1154  # for alca, skims, etc
def addExtraStream(self, name, stream, workflow='full'):
    """Create and register a PoolOutputModule for an extra stream (ALCA, skims, ...).

    name     -- attribute name of the output module on the process
    stream   -- cms.FilteredStream-like object (selectEvents, paths, content,
                name, dataTier)
    workflow -- 'full' (paths + output), 'producers' (paths only) or
                'output' (output only, select events rerouted to the
                previous process' trigger results)
    Returns the configured output module.
    """
    # define output module and go from there
    output = cms.OutputModule("PoolOutputModule")
    if stream.selectEvents.parameters_().__len__()!=0:
        output.SelectEvents = stream.selectEvents
    else:
        # no explicit selection: select on the stream's own path(s)
        output.SelectEvents = cms.untracked.PSet()
        output.SelectEvents.SelectEvents=cms.vstring()
        if isinstance(stream.paths,tuple):
            for path in stream.paths:
                output.SelectEvents.SelectEvents.append(path.label())
        else:
            output.SelectEvents.SelectEvents.append(stream.paths.label())



    if isinstance(stream.content,str):
        # content given by name: copy the parameters of the named event content
        evtPset=getattr(self.process,stream.content)
        for p in evtPset.parameters_():
            setattr(output,p,getattr(evtPset,p))
        if not self._options.inlineEventContent:
            # dump a reference to the event content instead of inlining it
            def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                return label
            output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
    else:
        output.outputCommands = stream.content


    output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

    output.dataset  = cms.untracked.PSet( dataTier = stream.dataTier,
                                          filterName = cms.untracked.string(stream.name))

    if self._options.filtername:
        output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

    #add an automatic flushing to limit memory consumption
    output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

    # fix: the original tested membership in the single string "producers,full",
    # which matched by accident via substring search; use a real tuple
    if workflow in ("producers", "full"):
        if isinstance(stream.paths,tuple):
            for path in stream.paths:
                self.schedule.append(path)
        else:
            self.schedule.append(stream.paths)


    # in case of relvals we don't want to have additional outputs
    if (not self._options.relval) and workflow in ("full","output"):
        self.additionalOutputs[name] = output
        setattr(self.process,name,output)

    if workflow == 'output':
        # adjust the select events to the proper trigger results from previous process
        filterList = output.SelectEvents.SelectEvents
        for i, filterName in enumerate(filterList):  # renamed: avoid shadowing builtin 'filter'
            filterList[i] = filterName+":"+self._options.triggerResultsProcess

    return output
1214 
1215  #----------------------------------------------------------------------------
1216  # here the methods to create the steps. Of course we are doing magic here ;)
1217  # prepare_STEPNAME modifies self.process and what else's needed.
1218  #----------------------------------------------------------------------------
1219 
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
    """Resolve a step specification into (load result, sequence, cff name).

    stepSpec may be:
      * 'seq'          -> sequence 'seq' taken from defaultCFF
      * 'dir/sub/cff'  -> defaultSEQ (or stepSpec itself when defaultSEQ is
                          empty) taken from that cff
      * 'cff.seq'      -> sequence 'seq' taken from 'cff'
    The chosen cff is loaded via loadAndRemember.

    Raises ValueError for a spec with more than one '.'.
    """
    _dotsplit = stepSpec.split('.')
    if ( len(_dotsplit)==1 ):
        if '/' in _dotsplit[0]:
            _sequence = defaultSEQ if defaultSEQ else stepSpec
            _cff = _dotsplit[0]
        else:
            _sequence = stepSpec
            _cff = defaultCFF
    elif ( len(_dotsplit)==2 ):
        _cff,_sequence = _dotsplit
    else:
        print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
        print(stepSpec,"not recognized")
        # fix: a bare 'raise' here has no active exception and would itself
        # fail with a confusing RuntimeError; raise a meaningful error instead
        raise ValueError("invalid step specification: "+stepSpec)
    l=self.loadAndRemember(_cff)
    return l,_sequence,_cff
1237 
def scheduleSequence(self,seq,prefix,what='Path'):
    """Turn a sequence specification into scheduled cms Path/EndPath objects.

    '*'-separated parts are merged into a single path named `prefix`
    (cms.Task members are associated rather than added); a plain name
    becomes one path named `prefix`; '+'-separated parts become one path
    each, named prefix0, prefix1, ...
    """
    if '*' in seq:
        # a single path containing every '*'-separated sequence
        for i, member in enumerate(seq.split('*')):
            if i == 0:
                setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, member)))
                continue
            target = getattr(self.process, prefix)
            addition = getattr(self.process, member)
            # a Task cannot be '+='-ed onto a path; it must be associated
            if isinstance(addition, cms.Task):
                target.associate(addition)
            else:
                target += addition
        self.schedule.append(getattr(self.process, prefix))
        return
    if not '+' in seq:
        # single sequence -> single path named exactly `prefix`
        if self.nextScheduleIsConditional:
            self.conditionalPaths.append(prefix)
        setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, seq)))
        self.schedule.append(getattr(self.process, prefix))
        return
    # '+'-separated parts: one path per sequence, numbered suffixes
    for i, member in enumerate(seq.split('+')):
        pathName = prefix + '%d' % (i)
        setattr(self.process, pathName, getattr(cms, what)(getattr(self.process, member)))
        self.schedule.append(getattr(self.process, pathName))
    return
1266 
def scheduleSequenceAtEnd(self,seq,prefix):
    """Convenience wrapper: schedule `seq` as an EndPath named `prefix`."""
    return self.scheduleSequence(seq, prefix, what='EndPath')
1270 
def prepare_ALCAPRODUCER(self, stepSpec = None):
    """ALCA variant that only schedules the producer paths (no output modules)."""
    self.prepare_ALCA(stepSpec, workflow="producers")
1273 
def prepare_ALCAOUTPUT(self, stepSpec = None):
    """ALCA variant that only creates the output modules (no producer paths)."""
    self.prepare_ALCA(stepSpec, workflow="output")
1276 
def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
    """ Enrich the process with alca streams.

    stepSpec lists the requested ALCARECO names ('+'-separated, '@'-shortcuts
    expanded via autoAlca); workflow selects producers/output/full handling
    and is forwarded to addExtraStream. Raises if a name is too long for DBS
    or cannot be found in the ALCA configuration.
    """
    alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

    MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
    # decide which ALCA paths to use
    alcaList = sequence.split("+")
    for alca in alcaList:
        if (len(alca)>MAXLEN):
            raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

    # NOTE(review): maxLevel appears unused in this method
    maxLevel=0
    from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
    # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
    self.expandMapping(alcaList,autoAlca)
    self.AlCaPaths=[]
    for name in alcaConfig.__dict__:
        alcastream = getattr(alcaConfig,name)
        shortName = name.replace('ALCARECOStream','')
        if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
            # some AlCa sequences cannot run with concurrent lumi sections
            if shortName in AlCaNoConcurrentLumis:
                print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                self._options.nConcurrentLumis = "1"
                self._options.nConcurrentIOVs = "1"
            output = self.addExtraStream(name,alcastream, workflow = workflow)
            self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
            self.AlCaPaths.append(shortName)
            if 'DQM' in alcaList:
                # keep the DQM products either on the named module or inline on the output
                if not self._options.inlineEventContent and hasattr(self.process,name):
                    self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                else:
                    output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

            #rename the HLT process name in the alca modules
            if self._options.hltProcess or 'HLT' in self.stepMap:
                if isinstance(alcastream.paths,tuple):
                    for path in alcastream.paths:
                        self.renameHLTprocessInSequence(path.label())
                else:
                    self.renameHLTprocessInSequence(alcastream.paths.label())

            # remove every occurrence of this stream from the request list
            for i in range(alcaList.count(shortName)):
                alcaList.remove(shortName)

        # DQM needs a special handling
        elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
            path = getattr(alcaConfig,name)
            self.schedule.append(path)
            alcaList.remove('DQM')

        if isinstance(alcastream,cms.Path):
            #black list the alca path so that they do not appear in the cfg
            self.blacklist_paths.append(alcastream)


    # anything left over in alcaList was not matched: report and fail
    if len(alcaList) != 0:
        available=[]
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            if isinstance(alcastream,cms.FilteredStream):
                available.append(name.replace('ALCARECOStream',''))
        print("The following alcas could not be found "+str(alcaList))
        print("available ",available)
        #print "verify your configuration, ignoring for now"
        raise Exception("The following alcas could not be found "+str(alcaList))
1342 
def prepare_LHE(self, stepSpec = None):
    """Load the LHE fragment, set its event count, and schedule it on a
    dedicated path that is kept out of the regular path bookkeeping."""
    # turn the event type ('dir/fragment.py') into an importable module path
    fragmentName = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
    print("Loading lhe fragment from",fragmentName)
    __import__(fragmentName)
    self.process.load(fragmentName)

    self._options.inlineObjets = self._options.inlineObjets + ',' + stepSpec

    producer = getattr(self.process, stepSpec)
    producer.nEvents = int(self._options.number)

    #schedule it on its own, excluded, path
    self.process.lhe_step = cms.Path(producer)
    self.excludedPaths.append("lhe_step")
    self.schedule.append(self.process.lhe_step)
1359 
def prepare_GEN(self, stepSpec = None):
    """ load the fragment of generator configuration and schedule the
    generation step (plus vertex smearing and, unless re-running GEN,
    the generator filter summary endpath)."""
    loadFailure=False
    #remove trailing .py
    #support old style .cfi by changing into something.cfi into something_cfi
    #remove python/ from the name
    loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
    #standard location of fragments
    if not '/' in loadFragment:
        loadFragment='Configuration.Generator.'+loadFragment
    else:
        loadFragment=loadFragment.replace('/','.')
    try:
        print("Loading generator fragment from",loadFragment)
        __import__(loadFragment)
    except:
        # NOTE(review): bare except deliberately tolerates any import failure —
        # the GEN step can still run from input files without a fragment
        loadFailure=True
        #if self.process.source and self.process.source.type_()=='EmptySource':
        if not (self._options.filein or self._options.dasquery):
            raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

    if not loadFailure:
        from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators

        generatorModule=sys.modules[loadFragment]
        genModules=generatorModule.__dict__
        #remove lhe producer module since this should have been
        #imported instead in the LHE step
        if self.LHEDefaultSeq in genModules:
            del genModules[self.LHEDefaultSeq]

        if self._options.hideGen:
            self.loadAndRemember(loadFragment)
        else:
            self.process.load(loadFragment)
            # expose the objects from that fragment to the configuration
            import FWCore.ParameterSet.Modules as cmstypes
            for name in genModules:
                theObject = getattr(generatorModule,name)
                if isinstance(theObject, cmstypes._Module):
                    self._options.inlineObjets=name+','+self._options.inlineObjets
                    # some generators cannot run with concurrent lumi sections
                    if theObject.type_() in noConcurrentLumiGenerators:
                        print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
                        self._options.nConcurrentLumis = "1"
                        self._options.nConcurrentIOVs = "1"
                elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                    self._options.inlineObjets+=','+name

        # remember the production filter sequence for later path insertion
        if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly' or stepSpec == 'pgen_smear':
            if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                self.productionFilterSequence = 'ProductionFilterSequence'
            elif 'generator' in genModules:
                self.productionFilterSequence = 'generator'

    """ Enrich the schedule with the rest of the generation step """
    _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)

    if True:
        try:
            from Configuration.StandardSequences.VtxSmeared import VtxSmeared
            cffToBeLoaded=VtxSmeared[self._options.beamspot]
            self.loadAndRemember(cffToBeLoaded)
        except ImportError:
            raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

    # Heavy-ion generation needs an extra mixing/embedding cff
    if self._options.scenario == 'HeavyIons':
        if self._options.pileup=='HiMixGEN':
            self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
        elif self._options.pileup=='HiMixEmbGEN':
            self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
        else:
            self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

    self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
    self.schedule.append(self.process.generation_step)

    #register to the genstepfilter the name of the path (static right now, but might evolve)
    self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

    if 'reGEN' in self.stepMap:
        #stop here
        return

    """ Enrich the schedule with the summary of the filter step """
    #the gen filter in the endpath
    self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
    self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
    return
1448 
def prepare_SIM(self, stepSpec = None):
    """ Enrich the schedule with the simulation step"""
    _, simSeq, _ = self.loadDefaultOrSpecifiedCFF(stepSpec, self.SIMDefaultCFF)
    if self._options.fast:
        # FastSim: only the zero-field special case applies
        if self._options.magField=='0T':
            self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
    else:
        if self._options.gflash==True:
            self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
        if self._options.magField=='0T':
            self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")

    self.scheduleSequence(simSeq,'simulation_step')
    return
1464 
def prepare_DIGI(self, stepSpec = None):
    """ Enrich the schedule with the digitisation step"""
    _, digiSeq, _ = self.loadDefaultOrSpecifiedCFF(stepSpec, self.DIGIDefaultCFF)

    if self._options.gflash==True:
        self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")

    if digiSeq in ('pdigi_valid', 'pdigi_hi'):
        self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")

    # when digitising from an existing GEN input (and not from an LHE file),
    # keep the generator products on input
    # (membership test first so process.source is only consulted when needed)
    if digiSeq not in ('pdigi_nogen', 'pdigi_valid_nogen', 'pdigi_hi_nogen') and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
        if self._options.inputEventContent=='':
            self._options.inputEventContent='REGEN'
        else:
            self._options.inputEventContent=self._options.inputEventContent+',REGEN'


    self.scheduleSequence(digiSeq,'digitisation_step')
    return
1484 
1485  def prepare_CFWRITER(self, stepSpec = None):
1486  """ Enrich the schedule with the crossing frame writer step"""
1488  self.scheduleSequence('pcfw','cfwriter_step')
1489  return
1490 
1491  def prepare_DATAMIX(self, stepSpec = None):
1492  """ Enrich the schedule with the digitisation step"""
1494  self.scheduleSequence('pdatamix','datamixing_step')
1495 
1496  if self._options.pileup_input:
1497  theFiles=''
1498  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1499  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1500  elif self._options.pileup_input.startswith("filelist:"):
1501  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1502  else:
1503  theFiles=self._options.pileup_input.split(',')
1504  #print theFiles
1505  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1506 
1507  return
1508 
1509  def prepare_DIGI2RAW(self, stepSpec = None):
1510  _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1511  self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1512  return
1513 
1514  def prepare_REPACK(self, stepSpec = None):
1515  _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1516  self.scheduleSequence(_repackSeq,'digi2repack_step')
1517  return
1518 
1519  def prepare_L1(self, stepSpec = None):
1520  """ Enrich the schedule with the L1 simulation step"""
1521  assert(stepSpec == None)
1522  self.loadAndRemember(self.L1EMDefaultCFF)
1523  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1524  return
1525 
1526  def prepare_L1REPACK(self, stepSpec = None):
1527  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1528  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1529  if stepSpec in supported:
1530  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1531  if self._options.scenario == 'HeavyIons':
1532  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1533  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1534  else:
1535  print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1536  raise Exception('unsupported feature')
1537 
    def prepare_HLT(self, stepSpec = None):
        """ Enrich the schedule with the HLT simulation step"""
        # an explicit HLT menu specification is mandatory
        if not stepSpec:
            print("no specification of the hlt menu has been given, should never happen")
            raise  Exception('no HLT specifications provided')

        if '@' in stepSpec:
            # case where HLT:@something was provided: resolve the alias
            from Configuration.HLT.autoHLT import autoHLT
            key = stepSpec[1:]
            if key in autoHLT:
                stepSpec = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in stepSpec:
            #case where HLT:something:something was provided
            # load the menu from the ConfDB at runtime rather than from a cff
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the options dict as keyword arguments for the dumped call
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
            if stepSpec == 'run,fromSource':
                # take the run number from whatever the source provides
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception(f'Cannot replace menu to load {stepSpec}')
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the frozen cff for it
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        # when the job does not run under the 'HLT' process name, rename it
        # both in the dumped config and on the live process
        if self._options.name != 'HLT':
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        if self.process.schedule == None:
            raise Exception('the HLT step did not attach a valid schedule to the process')

        # remember where HLT paths start, and keep them out of the dumped cfg
        self.scheduleIndexOfFirstHLTPath = len(self.schedule)
        [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

        # this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1594 
1595 
1596  def prepare_RAW2RECO(self, stepSpec = None):
1597  if ','in stepSpec:
1598  seqReco,seqDigi=stepSpec.spli(',')
1599  else:
1600  print(f"RAW2RECO requires two specifications {stepSpec} insufficient")
1601 
1602  self.prepare_RAW2DIGI(seqDigi)
1603  self.prepare_RECO(seqReco)
1604  return
1605 
1606  def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1607  _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1608  self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1609  return
1610 
1611  def prepare_PATFILTER(self, stepSpec = None):
1612  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1613  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1614  for filt in allMetFilterPaths:
1615  self.schedule.append(getattr(self.process,'Flag_'+filt))
1616 
1617  def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1618  ''' Enrich the schedule with L1 HW validation '''
1619  self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1620  print('\n\n\n DEPRECATED this has no action \n\n\n')
1621  return
1622 
1623  def prepare_L1Reco(self, stepSpec = "L1Reco"):
1624  ''' Enrich the schedule with L1 reconstruction '''
1625  _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1626  self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1627  return
1628 
1629  def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1630  ''' Enrich the schedule with L1 reconstruction '''
1631  _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1632  self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1633  return
1634 
1635  def prepare_FILTER(self, stepSpec = None):
1636  ''' Enrich the schedule with a user defined filter sequence '''
1637 
1638  filterConfig,filterSeq = stepSpec.split('.')
1639  filterConfig=self.load(filterConfig)
1640 
1641  class PrintAllModules(object):
1642  def __init__(self):
1643  self.inliner=''
1644  pass
1645  def enter(self,visitee):
1646  try:
1647  label=visitee.label()
1648 
1649  self.inliner=label+','+self.inliner
1650  except:
1651  pass
1652  def leave(self,v): pass
1653 
1654  expander=PrintAllModules()
1655  getattr(self.process,filterSeq).visit( expander )
1656  self._options.inlineObjets+=','+expander.inliner
1657  self._options.inlineObjets+=','+filterSeq
1658 
1659 
1660  self.scheduleSequence(filterSeq,'filtering_step')
1661  self.nextScheduleIsConditional=True
1662 
1663  self.productionFilterSequence = filterSeq
1664 
1665  return
1666 
1667  def prepare_RECO(self, stepSpec = "reconstruction"):
1668  ''' Enrich the schedule with reconstruction '''
1669  _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1670  self.scheduleSequence(_recoSeq,'reconstruction_step')
1671  return
1672 
1673  def prepare_RECOSIM(self, stepSpec = "recosim"):
1674  ''' Enrich the schedule with reconstruction '''
1675  _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1676  self.scheduleSequence(_recosimSeq,'recosim_step')
1677  return
1678 
1679  def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
1680  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1681  if not self._options.fast:
1682  print("ERROR: this step is only implemented for FastSim")
1683  sys.exit()
1684  _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1685  self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
1686  return
1687 
1688  def prepare_PAT(self, stepSpec = "miniAOD"):
1689  ''' Enrich the schedule with PAT '''
1690  self.prepare_PATFILTER(self)
1691  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1692  self.labelsToAssociate.append('patTask')
1693  if self._options.isData:
1694  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1695  else:
1696  if self._options.fast:
1697  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1698  else:
1699  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1700 
1701  if self._options.hltProcess:
1702  if len(self._options.customise_commands) > 1:
1703  self._options.customise_commands = self._options.customise_commands + " \n"
1704  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1705  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1706  self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1707 
1708 # self.renameHLTprocessInSequence(sequence)
1709 
1710  return
1711 
1712  def prepare_PATGEN(self, stepSpec = "miniGEN"):
1713  ''' Enrich the schedule with PATGEN '''
1714  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
1715  self.labelsToAssociate.append('patGENTask')
1716  if self._options.isData:
1717  raise Exception("PATGEN step can only run on MC")
1718  return
1719 
1720  def prepare_NANO(self, stepSpec = '' ):
1721  print(f"in prepare_nano {stepSpec}")
1722  ''' Enrich the schedule with NANO '''
1723  _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1724  self.scheduleSequence(_nanoSeq,'nanoAOD_step')
1725  custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
1726  self._options.customisation_file.insert(0,'.'.join([_nanoCff,custom]))
1727  if self._options.hltProcess:
1728  if len(self._options.customise_commands) > 1:
1729  self._options.customise_commands = self._options.customise_commands + " \n"
1730  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1731 
1732  def prepare_NANOGEN(self, stepSpec = "nanoAOD"):
1733  ''' Enrich the schedule with NANOGEN '''
1734  # TODO: Need to modify this based on the input file type
1735  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1736  _,_nanogenSeq,_nanogenCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANOGENDefaultCFF)
1737  self.scheduleSequence(_nanogenSeq,'nanoAOD_step')
1738  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1739  if self._options.runUnscheduled:
1740  self._options.customisation_file_unsch.insert(0, '.'.join([_nanogenCff, custom]))
1741  else:
1742  self._options.customisation_file.insert(0, '.'.join([_nanogenCff, custom]))
1743 
1744  def prepare_SKIM(self, stepSpec = "all"):
1745  ''' Enrich the schedule with skimming fragments'''
1746  skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)
1747 
1748  skimlist=sequence.split('+')
1749 
1750  from Configuration.Skimming.autoSkim import autoSkim
1751  self.expandMapping(skimlist,autoSkim)
1752 
1753  #print "dictionnary for skims:",skimConfig.__dict__
1754  for skim in skimConfig.__dict__:
1755  skimstream = getattr(skimConfig,skim)
1756  if isinstance(skimstream,cms.Path):
1757  #black list the alca path so that they do not appear in the cfg
1758  self.blacklist_paths.append(skimstream)
1759  if (not isinstance(skimstream,cms.FilteredStream)):
1760  continue
1761  shortname = skim.replace('SKIMStream','')
1762  if (sequence=="all"):
1763  self.addExtraStream(skim,skimstream)
1764  elif (shortname in skimlist):
1765  self.addExtraStream(skim,skimstream)
1766  #add a DQM eventcontent for this guy
1767  if self._options.datatier=='DQM':
1768  self.process.load(self.EVTCONTDefaultCFF)
1769  skimstreamDQM = cms.FilteredStream(
1770  responsible = skimstream.responsible,
1771  name = skimstream.name+'DQM',
1772  paths = skimstream.paths,
1773  selectEvents = skimstream.selectEvents,
1774  content = self._options.datatier+'EventContent',
1775  dataTier = cms.untracked.string(self._options.datatier)
1776  )
1777  self.addExtraStream(skim+'DQM',skimstreamDQM)
1778  for i in range(skimlist.count(shortname)):
1779  skimlist.remove(shortname)
1780 
1781 
1782 
1783  if (skimlist.__len__()!=0 and sequence!="all"):
1784  print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1785  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1786 
1787  def prepare_USER(self, stepSpec = None):
1788  ''' Enrich the schedule with a user defined sequence '''
1789  _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1790  self.scheduleSequence(_userSeq,'user_step')
1791  return
1792 
1793  def prepare_POSTRECO(self, stepSpec = None):
1794  """ Enrich the schedule with the postreco step """
1796  self.scheduleSequence('postreco_generator','postreco_step')
1797  return
1798 
1799 
1800  def prepare_VALIDATION(self, stepSpec = 'validation'):
1801  print(f"{stepSpec} in preparing validation")
1802  _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1803  from Validation.Configuration.autoValidation import autoValidation
1804  #in case VALIDATION:something:somethingelse -> something,somethingelse
1805  if sequence.find(',')!=-1:
1806  prevalSeqName=sequence.split(',')[0].split('+')
1807  valSeqName=sequence.split(',')[1].split('+')
1808  self.expandMapping(prevalSeqName,autoValidation,index=0)
1809  self.expandMapping(valSeqName,autoValidation,index=1)
1810  else:
1811  if '@' in sequence:
1812  prevalSeqName=sequence.split('+')
1813  valSeqName=sequence.split('+')
1814  self.expandMapping(prevalSeqName,autoValidation,index=0)
1815  self.expandMapping(valSeqName,autoValidation,index=1)
1816  else:
1817  postfix=''
1818  if sequence:
1819  postfix='_'+sequence
1820  prevalSeqName=['prevalidation'+postfix]
1821  valSeqName=['validation'+postfix]
1822  if not hasattr(self.process,valSeqName[0]):
1823  prevalSeqName=['']
1824  valSeqName=[sequence]
1825 
1826  def NFI(index):
1827 
1828  if index==0:
1829  return ''
1830  else:
1831  return '%s'%index
1832 
1833 
1834  #rename the HLT process in validation steps
1835  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1836  for s in valSeqName+prevalSeqName:
1837  if s:
1839  for (i,s) in enumerate(prevalSeqName):
1840  if s:
1841  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1842  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1843 
1844  for (i,s) in enumerate(valSeqName):
1845  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1846  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1847 
1848  #needed in case the miniAODValidation sequence is run starting from AODSIM
1849  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1850  return
1851 
1852  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1853  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1854  self._options.restoreRNDSeeds=True
1855 
1856  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1857  self.executeAndRemember("process.mix.playback = True")
1858  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1859  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1860  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1861 
1862  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1863  #will get in the schedule, smoothly
1864  for (i,s) in enumerate(valSeqName):
1865  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1866 
1867  return
1868 
1869 
1871  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1872  It will climb down within PSets, VPSets and VInputTags to find its target"""
1873  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1874  self._paramReplace = paramReplace
1875  self._paramSearch = paramSearch
1876  self._verbose = verbose
1877  self._whitelist = whitelist
1879  def doIt(self,pset,base):
1880  if isinstance(pset, cms._Parameterizable):
1881  for name in pset.parameters_().keys():
1882  # skip whitelisted parameters
1883  if name in self._whitelist:
1884  continue
1885  # if I use pset.parameters_().items() I get copies of the parameter values
1886  # so I can't modify the nested pset
1887  value = getattr(pset,name)
1888  type = value.pythonTypeName()
1889  if type in ('cms.PSet', 'cms.untracked.PSet'):
1890  self.doIt(value,base+"."+name)
1891  elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
1892  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1893  elif type in ('cms.string', 'cms.untracked.string'):
1894  if value.value() == self._paramSearch:
1895  if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
1896  setattr(pset, name,self._paramReplace)
1897  elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
1898  for (i,n) in enumerate(value):
1899  if not isinstance(n, cms.InputTag):
1900  n=cms.InputTag(n)
1901  if n.processName == self._paramSearch:
1902  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1903  if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
1904  setattr(n,"processName",self._paramReplace)
1905  value[i]=n
1906  elif type in ('cms.vstring', 'cms.untracked.vstring'):
1907  for (i,n) in enumerate(value):
1908  if n==self._paramSearch:
1909  getattr(pset,name)[i]=self._paramReplace
1910  elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
1911  if value.processName == self._paramSearch:
1912  if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
1913  setattr(getattr(pset, name),"processName",self._paramReplace)
1914 
1915  def enter(self,visitee):
1916  label = ''
1917  try:
1918  label = visitee.label()
1919  except AttributeError:
1920  label = '<Module not in a Process>'
1921  except:
1922  label = 'other execption'
1923  self.doIt(visitee, label)
1924 
1925  def leave(self,visitee):
1926  pass
1927 
1928  #visit a sequence to repalce all input tags
1929  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1930  print("Replacing all InputTag %s => %s"%(oldT,newT))
1931  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1932  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1933  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1934  if not loadMe in self.additionalCommands:
1935  self.additionalCommands.append(loadMe)
1936  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1937 
1938  #change the process name used to address HLT results in any sequence
1939  def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
1940  if self._options.hltProcess:
1941  proc=self._options.hltProcess
1942  else:
1943  proc=self.process.name_()
1944  if proc==HLTprocess: return
1945  # look up all module in dqm sequence
1946  print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1947  getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
1948  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1949  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1950  self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1951 
1952 
1953  def expandMapping(self,seqList,mapping,index=None):
1954  maxLevel=30
1955  level=0
1956  while '@' in repr(seqList) and level<maxLevel:
1957  level+=1
1958  for specifiedCommand in seqList:
1959  if specifiedCommand.startswith('@'):
1960  location=specifiedCommand[1:]
1961  if not location in mapping:
1962  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1963  mappedTo=mapping[location]
1964  if index!=None:
1965  mappedTo=mappedTo[index]
1966  seqList.remove(specifiedCommand)
1967  seqList.extend(mappedTo.split('+'))
1968  break;
1969  if level==maxLevel:
1970  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1971 
    def prepare_DQM(self, stepSpec = 'DQMOffline'):
        """Enrich the schedule with offline DQM EndPaths (and, where defined,
        their post-PAT counterparts)."""
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
        # index 0 of an autoDQM entry is the DQM sequence, index 1 its post-PAT sequence
        sequenceList=_dqmSeq.split('+')
        postSequenceList=_dqmSeq.split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            # NOTE(review): deduplicating via set() scrambles the order, while the
            # second loop below compares sequenceList[i] to postSequenceList[i]
            # by position — confirm alignment is still intended in this case
            sequenceList=list(set(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,_sequence) in enumerate(sequenceList):
            # first path keeps the plain name, the rest are numbered
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # DQM modules read trigger results: point them at the right process
                self.renameHLTprocessInSequence(_sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,_sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2014 
2015  def prepare_HARVESTING(self, stepSpec = None):
2016  """ Enrich the process with harvesting step """
2017  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2019 
2020  harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2021 
2022  # decide which HARVESTING paths to use
2023  harvestingList = sequence.split("+")
2024  from DQMOffline.Configuration.autoDQM import autoDQM
2025  from Validation.Configuration.autoValidation import autoValidation
2026  import copy
2027  combined_mapping = copy.deepcopy( autoDQM )
2028  combined_mapping.update( autoValidation )
2029  self.expandMapping(harvestingList,combined_mapping,index=-1)
2030 
2031  if len(set(harvestingList))!=len(harvestingList):
2032  harvestingList=list(set(harvestingList))
2033  print("Duplicate entries for HARVESTING, using",harvestingList)
2034 
2035  for name in harvestingList:
2036  if not name in harvestingConfig.__dict__:
2037  print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2038  # trigger hard error, like for other sequence types
2039  getattr(self.process, name)
2040  continue
2041  harvestingstream = getattr(harvestingConfig,name)
2042  if isinstance(harvestingstream,cms.Path):
2043  self.schedule.append(harvestingstream)
2044  self.blacklist_paths.append(harvestingstream)
2045  if isinstance(harvestingstream,cms.Sequence):
2046  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2047  self.schedule.append(getattr(self.process,name+"_step"))
2048 
2049  self.scheduleSequence('DQMSaver','dqmsave_step')
2050  return
2051 
    def prepare_ALCAHARVEST(self, stepSpec = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        # only the part after the last '.' names the harvesting sequence(s)
        sequence=stepSpec.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # a VPSet holds several payloads: extend; a single PSet: append
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                # mark this request as satisfied
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over was requested but not found in the config
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2084 
2085 
2086 
2087  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2088  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2089  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2090  return
2091 
2092  def finalizeFastSimHLT(self):
2093  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2094  self.schedule.append(self.process.reconstruction)
2095 
2096 
2097  def build_production_info(self, evt_type, evtnumber):
2098  """ Add useful info for the production. """
2099  self.process.configurationMetadata=cms.untracked.PSet\
2100  (version=cms.untracked.string("$Revision: 1.19 $"),
2101  name=cms.untracked.string("Applications"),
2102  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2103  )
2104 
2105  self.addedObjects.append(("Production Info","configurationMetadata"))
2106 
2107 
    def create_process(self):
        """Instantiate self.process (applying eras and process modifiers) and
        start assembling the python configuration dump in self.pythonCfgCode."""
        self.pythonCfgCode = "# Auto generated configuration file\n"
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        # now set up the modifiers
        modifiers=[]          # Modifier objects handed to cms.Process
        modifierStrings=[]    # their names, for the dumped cfg text
        modifierImports=[]    # import lines for the dumped cfg text

        if hasattr(self._options,"era") and self._options.era :
            # Multiple eras can be specified in a comma seperated list
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(",") :
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras,requestedEra))


        if hasattr(self._options,"procModifiers") and self._options.procModifiers:
            import importlib
            thingsImported=[]
            # each entry may itself be a comma separated list
            for c in self._options.procModifiers:
                thingsImported.extend(c.split(","))
            for pm in thingsImported:
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))

        self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop


        if len(modifierStrings)>0:
            self.pythonCfgCode+= ','+','.join(modifierStrings)
        self.pythonCfgCode+=')\n\n'

        #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
        #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
        if self.process == None:
            if len(modifiers)>0:
                self.process = cms.Process(self._options.name,*modifiers)
            else:
                self.process = cms.Process(self._options.name)
2153 
2154 
2155 
2156 
2157  def prepare(self, doChecking = False):
2158  """ Prepare the configuration string and add missing pieces."""
2159 
2160  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2161  self.addMaxEvents()
2162  if self.with_input:
2163  self.addSource()
2164  self.addStandardSequences()
2165 
2166  self.completeInputCommand()
2167  self.addConditions()
2168 
2169 
2170  outputModuleCfgCode=""
2171  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2172  outputModuleCfgCode=self.addOutput()
2173 
2174  self.addCommon()
2175 
2176  self.pythonCfgCode += "# import of standard configurations\n"
2177  for module in self.imports:
2178  self.pythonCfgCode += ("process.load('"+module+"')\n")
2179 
2180  # production info
2181  if not hasattr(self.process,"configurationMetadata"):
2182  self.build_production_info(self._options.evt_type, self._options.number)
2183  else:
2184  #the PSet was added via a load
2185  self.addedObjects.append(("Production Info","configurationMetadata"))
2186 
2187  self.pythonCfgCode +="\n"
2188  for comment,object in self.addedObjects:
2189  if comment!="":
2190  self.pythonCfgCode += "\n# "+comment+"\n"
2191  self.pythonCfgCode += dumpPython(self.process,object)
2192 
2193  # dump the output definition
2194  self.pythonCfgCode += "\n# Output definition\n"
2195  self.pythonCfgCode += outputModuleCfgCode
2196 
2197  # dump all additional outputs (e.g. alca or skim streams)
2198  self.pythonCfgCode += "\n# Additional output definition\n"
2199  #I do not understand why the keys are not normally ordered.
2200  nl=sorted(self.additionalOutputs.keys())
2201  for name in nl:
2202  output = self.additionalOutputs[name]
2203  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2204  tmpOut = cms.EndPath(output)
2205  setattr(self.process,name+'OutPath',tmpOut)
2206  self.schedule.append(tmpOut)
2207 
2208  # dump all additional commands
2209  self.pythonCfgCode += "\n# Other statements\n"
2210  for command in self.additionalCommands:
2211  self.pythonCfgCode += command + "\n"
2212 
2213  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2214  for object in self._options.inlineObjets.split(','):
2215  if not object:
2216  continue
2217  if not hasattr(self.process,object):
2218  print('cannot inline -'+object+'- : not known')
2219  else:
2220  self.pythonCfgCode +='\n'
2221  self.pythonCfgCode +=dumpPython(self.process,object)
2222 
2223  if self._options.pileup=='HiMixEmbGEN':
2224  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2225 
2226  # dump all paths
2227  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2228  for path in self.process.paths:
2229  if getattr(self.process,path) not in self.blacklist_paths:
2230  self.pythonCfgCode += dumpPython(self.process,path)
2231 
2232  for endpath in self.process.endpaths:
2233  if getattr(self.process,endpath) not in self.blacklist_paths:
2234  self.pythonCfgCode += dumpPython(self.process,endpath)
2235 
2236  # dump the schedule
2237  self.pythonCfgCode += "\n# Schedule definition\n"
2238 
2239  # handling of the schedule
2240  pathNames = ['process.'+p.label_() for p in self.schedule]
2241  if self.process.schedule == None:
2242  self.process.schedule = cms.Schedule()
2243  for item in self.schedule:
2244  self.process.schedule.append(item)
2245  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2246  else:
2247  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2248  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2249 
2250  for index, item in enumerate(self.schedule):
2251  if index < self.scheduleIndexOfFirstHLTPath:
2252  self.process.schedule.insert(index, item)
2253  else:
2254  self.process.schedule.append(item)
2255 
2256  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2257  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2258  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2259  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2260  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2261 
2262  self.pythonCfgCode += result
2263 
2264  for labelToAssociate in self.labelsToAssociate:
2265  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2266  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2267 
2268  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2270  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2271  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2272 
2273  overrideThreads = (self._options.nThreads != "1")
2274  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2275  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2276 
2277  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2278  self.pythonCfgCode +="\n"
2279  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2280  if overrideThreads:
2281  self.pythonCfgCode +="process.options.numberOfThreads = "+self._options.nThreads+"\n"
2282  self.pythonCfgCode +="process.options.numberOfStreams = "+self._options.nStreams+"\n"
2283  self.process.options.numberOfThreads = int(self._options.nThreads)
2284  self.process.options.numberOfStreams = int(self._options.nStreams)
2285  if overrideConcurrentLumis:
2286  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = "+self._options.nConcurrentLumis+"\n"
2287  self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
2288  if overrideConcurrentIOVs:
2289  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = "+self._options.nConcurrentIOVs+"\n"
2290  self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)
2291 
2292  if self._options.accelerators is not None:
2293  accelerators = self._options.accelerators.split(',')
2294  self.pythonCfgCode += "\n"
2295  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2296  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2297  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2298  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2299  self.process.options.accelerators = accelerators
2300 
2301  #repacked version
2302  if self._options.isRepacked:
2303  self.pythonCfgCode +="\n"
2304  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2305  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2306  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2307 
2308  # special treatment in case of production filter sequence 2/2
2309  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2310  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2311  self.pythonCfgCode +='for path in process.paths:\n'
2312  if len(self.conditionalPaths):
2313  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2314  if len(self.excludedPaths):
2315  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2316  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2317  pfs = getattr(self.process,self.productionFilterSequence)
2318  for path in self.process.paths:
2319  if not path in self.conditionalPaths: continue
2320  if path in self.excludedPaths: continue
2321  getattr(self.process,path).insert(0, pfs)
2322 
2323 
2324  # dump customise fragment
2325  self.pythonCfgCode += self.addCustomise()
2326 
2327  if self._options.runUnscheduled:
2328  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2329  # Keep the "unscheduled customise functions" separate for now,
2330  # there are customize functions given by users (in our unit
2331  # tests) that need to be run before the "unscheduled customise
2332  # functions"
2333  self.pythonCfgCode += self.addCustomise(1)
2334 
2335  self.pythonCfgCode += self.addCustomiseCmdLine()
2336 
2337  if hasattr(self.process,"logErrorHarvester"):
2338  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2339  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2340  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2341  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2342  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2344 
2345  # Temporary hack to put the early delete customization after
2346  # everything else
2347  #
2348  # FIXME: remove when no longer needed
2349  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2350  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2351  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2352  self.pythonCfgCode += "# End adding early deletion\n"
2353  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2354  self.process = customiseEarlyDelete(self.process)
2355 
2356  imports = cms.specialImportRegistry.getSpecialImports()
2357  if len(imports) > 0:
2358  #need to inject this at the top
2359  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2360  #now find the end of line
2361  index = self.pythonCfgCode.find("\n",index)
2362  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2363 
2364 
2365  # make the .io file
2366 
2367  if self._options.io:
2368  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2369  if not self._options.io.endswith('.io'): self._option.io+='.io'
2370  io=open(self._options.io,'w')
2371  ioJson={}
2372  if hasattr(self.process.source,"fileNames"):
2373  if len(self.process.source.fileNames.value()):
2374  ioJson['primary']=self.process.source.fileNames.value()
2375  if hasattr(self.process.source,"secondaryFileNames"):
2376  if len(self.process.source.secondaryFileNames.value()):
2377  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2378  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2379  ioJson['pileup']=self._options.pileup_input[4:]
2380  for (o,om) in self.process.outputModules_().items():
2381  ioJson[o]=om.fileName.value()
2382  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2383  if self.productionFilterSequence:
2384  ioJson['filter']=self.productionFilterSequence
2385  import json
2386  io.write(json.dumps(ioJson))
2387  return
2388 
2389 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:194
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule