CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 from __future__ import print_function
4 __version__ = "$Revision: 1.19 $"
5 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
6 
7 import FWCore.ParameterSet.Config as cms
8 from FWCore.ParameterSet.Modules import _Module
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import hashlib
14 import sys
15 import re
16 import collections
17 from subprocess import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes as DictTypes
# Plain attribute bag: the canonical cmsDriver option defaults are attached
# to an instance of this class below (see defaultOptions).
class Options:
    pass
21 
# the canonical defaults
defaultOptions = Options()

# -- sample type / processing steps
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
# -- pile-up configuration
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# -- detector geometry / field / conditions
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# -- event counts (-1 = process all input events)
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# -- input sources (plain files, DAS queries, secondary files)
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# -- user customisation hooks
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# -- I/O locations and formats
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# -- output content definition
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False
# vertex-smearing scenario keys (import kept in place to preserve original import order)
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
# -- profiling (IgProf / jemalloc heap)
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# -- run-dependent MC configuration
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# -- concurrency settings (kept as strings here)
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
94 
95 # some helper routines
def dumpPython(process, name):
    """Return the python dump line(s) 'process.<name> = ...' for the named object.

    Paths, EndPaths and Sequences are emitted without a trailing newline;
    every other object type gets one.  The former _Module/ESProducer branch
    was byte-identical to the else branch and has been folded into it.
    """
    theObject = getattr(process, name)
    if isinstance(theObject, (cms.Path, cms.EndPath, cms.Sequence)):
        return "process." + name + " = " + theObject.dumpPython()
    return "process." + name + " = " + theObject.dumpPython() + "\n"
def filesFromList(fileName, s=None):
    """Read input file names from a text file, one entry per line.

    Lines containing two .root names are treated as "primary secondary"
    pairs; lines with a single .root name are primary only.  Duplicates are
    removed while preserving order.

    fileName -- path of the text file to parse
    s        -- optional cms.Source-like object; when given, its fileNames /
                secondaryFileNames vstrings are created or extended in place

    Returns (primaryFiles, secondaryFiles).
    Raises Exception when no primary file is found.
    """
    prim = []
    sec = []
    # use a context manager so the file handle is closed deterministically
    # (previously a bare open() iterated in the for loop was never closed)
    with open(fileName, 'r') as listFile:
        for line in listFile:
            if line.count(".root") >= 2:
                #two files solution...
                entries = line.replace("\n", "").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif line.find(".root") != -1:
                prim.append(line.replace("\n", ""))
    # remove any duplicates but keep the order
    file_seen = set()
    prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
    file_seen = set()
    sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
    if s:
        # imported lazily: plain parsing (s=None) does not require FWCore
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s, "fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec) != 0:
            if not hasattr(s, "secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ", prim)
    if len(prim) == 0:
        raise Exception("There are no files in input from the file list")
    if len(sec) != 0:
        print("found parent files:", sec)
    return (prim, sec)
139 
def filesFromDASQuery(query,option="",s=None):
    """Resolve a DAS query via the dasgoclient command-line tool.

    query  -- DAS query string, e.g. 'file dataset = /A/B/C'
    option -- extra command-line options passed through to dasgoclient
    s      -- optional cms.Source-like object; when given, its fileNames /
              secondaryFileNames vstrings are created or extended in place

    Output lines with two .root names become (primary, parent) pairs; lines
    with one .root name are primary only.  The query is retried up to 3
    times, sleeping 100 s between attempts, on non-zero exit status.
    Duplicates are removed and both lists are sorted.
    Returns (primaryFiles, secondaryFiles).
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5   # last dasgoclient exit status; initialised non-zero to enter the loop
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        # shell=True: option and query are interpolated into the command line
        # (trusted, operator-supplied input assumed)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
187 
def anyOf(listOfKeys, dict, opt=None):
    """Pop and return the value of the first of listOfKeys present in dict.

    Keys are checked in order; the first match is REMOVED from dict (so the
    caller can later flag leftover keys as unused) and its value returned.

    listOfKeys -- candidate keys, in priority order
    dict       -- mapping to search (parameter name shadows the builtin but
                  is kept for backward compatibility with keyword callers)
    opt        -- optional default returned when no key matches

    Raises Exception when no key matches and no default is given.
    """
    for k in listOfKeys:
        if k in dict:
            # single pop instead of lookup + pop
            return dict.pop(k)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
198 
200  """The main building routines """
201 
def __init__(self, options, process = None, with_output = False, with_input = False ):
    """options taken from old cmsDriver and optparse """

    # full output file path = output directory + file name
    options.outfile_name = options.dirout+options.fileout

    self._options = options

    # --data and --mc are mutually exclusive
    if self._options.isData and options.isMC:
        raise Exception("ERROR: You may specify only --data or --mc, not both")
    #if not self._options.conditions:
    #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

    # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
    # NOTE(review): eval() of the user-supplied --output definition; trusted
    # operator input is assumed here
    if 'ENDJOB' in self._options.step:
        if (hasattr(self._options,"outputDefinition") and \
            self._options.outputDefinition != '' and \
            any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
           (hasattr(self._options,"datatier") and \
            self._options.datatier and \
            'DQMIO' in self._options.datatier):
            print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
            self._options.step=self._options.step.replace(',ENDJOB','')

    # what steps are provided by this class?
    # every prepare_X method of ConfigBuilder advertises step X
    stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
    self.stepMap={}
    self.stepKeys=[]
    # each step spec has the form NAME, NAME:seq1+seq2 or NAME:alt:seq1+seq2
    for step in self._options.step.split(","):
        if step=='': continue
        stepParts = step.split(":")
        stepName = stepParts[0]
        # 're'-prefixed steps (e.g. reRECO) are accepted without being listed
        if stepName not in stepList and not stepName.startswith('re'):
            raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
        if len(stepParts)==1:
            self.stepMap[stepName]=""
        elif len(stepParts)==2:
            self.stepMap[stepName]=stepParts[1].split('+')
        elif len(stepParts)==3:
            self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
        else:
            raise ValueError(f"Step definition {step} invalid")
        self.stepKeys.append(stepName)

    #print(f"map of steps is: {self.stepMap}")

    self.with_output = with_output
    self.process=process

    if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
        self.with_output = False
    self.with_input = with_input
    # cff/cfi files loaded via loadAndRemember, replayed by the dump
    self.imports = []
    self.create_process()
    self.define_Configs()
    self.schedule = list()

    # we are doing three things here:
    # creating a process to catch errors
    # building the code to re-create the process

    # TODO: maybe a list of to be dumped objects would help as well
    self.blacklist_paths = []
    self.addedObjects = []
    # NOTE(review): additional attribute initialisations appear to be missing
    # from this view (gaps in the source listing) — confirm against VCS
276 
def profileOptions(self):
    """Parse the --profile option into IgProf service settings.

    The option string has the form ``type[:firstEvent[:interval[:format]]]``.
    Returns a tuple ``(firstEvent, interval, eventFormat, jobFormat)``.
    Raises Exception when the event or interval field is not numeric.
    """
    tokens = self._options.profile.split(':')
    firstEvent = 1
    reportInterval = 100
    eventFormat = None

    if tokens:
        # the leading profiler type token is not needed here
        tokens.pop(0)
    if tokens:
        startToken = tokens.pop(0)
        if not startToken.isdigit():
            raise Exception("%s is not a number" % startToken)
        firstEvent = int(startToken)
    if tokens:
        intervalToken = tokens.pop(0)
        if not intervalToken.isdigit():
            raise Exception("%s is not a number" % intervalToken)
        reportInterval = int(intervalToken)
    if tokens:
        eventFormat = tokens.pop(0)

    if not eventFormat:
        # default name embeds the event type and a digest of the job configuration
        configDigest = hashlib.md5(
            (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
             str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
        ).hexdigest()
        eventFormat = "%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""), configDigest)

    if eventFormat.endswith(".gz"):
        jobFormat = eventFormat.replace(".gz", "_EndOfJob.gz")
    else:
        jobFormat = eventFormat + "_EndOfJob.gz"

    return (firstEvent, reportInterval, eventFormat, jobFormat)
320 
322  """
323  addJeProfService
324  Function to add the jemalloc heap profile service so that you can dump in the middle
325  of the run.
326  """
327  profileOpts = self._options.profile.split(':')
328  profilerStart = 1
329  profilerInterval = 100
330  profilerFormat = None
331  profilerJobFormat = None
332 
333  if len(profileOpts):
334  #type, given as first argument is unused here
335  profileOpts.pop(0)
336  if len(profileOpts):
337  startEvent = profileOpts.pop(0)
338  if not startEvent.isdigit():
339  raise Exception("%s is not a number" % startEvent)
340  profilerStart = int(startEvent)
341  if len(profileOpts):
342  eventInterval = profileOpts.pop(0)
343  if not eventInterval.isdigit():
344  raise Exception("%s is not a number" % eventInterval)
345  profilerInterval = int(eventInterval)
346  if len(profileOpts):
347  profilerFormat = profileOpts.pop(0)
348 
349 
350  if not profilerFormat:
351  profilerFormat = "%s___%s___%%I.heap" % (
352  self._options.evt_type.replace("_cfi", ""),
353  hashlib.md5(
354  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
355  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
356  ).hexdigest()
357  )
358  if not profilerJobFormat and profilerFormat.endswith(".heap"):
359  profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
360  elif not profilerJobFormat:
361  profilerJobFormat = profilerFormat + "_EndOfJob.heap"
362 
363  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
364 
def load(self, includeFile):
    """Load a slash- or dot-separated config fragment into the process and
    return the corresponding Python module object."""
    dottedName = includeFile.replace('/', '.')
    self.process.load(dottedName)
    return sys.modules[dottedName]
369 
def loadAndRemember(self, includeFile):
    """Load a config fragment into the process, recording its dotted name in
    self.imports, and return the corresponding Python module object."""
    # we could make the imports a on-the-fly data method of the process instance itself
    # not sure if the latter is a good idea
    dottedName = includeFile.replace('/', '.')
    self.imports.append(dottedName)
    self.process.load(dottedName)
    return sys.modules[dottedName]
378 
def executeAndRemember(self, command):
    """Record a 'process.X = ...' style command in self.additionalCommands
    and execute it against self.process."""
    self.additionalCommands.append(command)
    if command.strip().startswith("#"):
        # pure comments are recorded but never executed
        return
    # rewrite standalone 'process' references to 'self.process', then run
    rewritten = re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])", r"\1self.process\3", command)
    exec(rewritten)
387 
def addCommon(self):
    """Apply step-independent process settings: harvesting error handling,
    optional lazy download, and the IgProf / JeProf profiling services."""
    steps = self.stepMap.keys()
    if 'HARVESTING' in steps or 'ALCAHARVEST' in steps:
        self.process.options.Rethrow = ['ProductNotFound']
        self.process.options.fileMode = 'FULLMERGE'

    self.addedObjects.append(("", "options"))

    if self._options.lazy_download:
        self.process.AdaptorConfig = cms.Service(
            "AdaptorConfig",
            stats=cms.untracked.bool(True),
            enable=cms.untracked.bool(True),
            cacheHint=cms.untracked.string("lazy-download"),
            readHint=cms.untracked.string("read-ahead-buffered"))
        self.addedObjects.append(("Setup lazy download", "AdaptorConfig"))

    if self._options.profile:
        start, interval, eventFormat, jobFormat = self.profileOptions()
        self.process.IgProfService = cms.Service(
            "IgProfService",
            reportFirstEvent=cms.untracked.int32(start),
            reportEventInterval=cms.untracked.int32(interval),
            reportToFileAtPostEvent=cms.untracked.string("| gzip -c > %s" % (eventFormat)),
            reportToFileAtPostEndJob=cms.untracked.string("| gzip -c > %s" % (jobFormat)))
        self.addedObjects.append(("Setup IGProf Service for profiling", "IgProfService"))

    if self._options.heap_profile:
        start, interval, eventFormat, jobFormat = self.heapProfileOptions()
        self.process.JeProfService = cms.Service(
            "JeProfService",
            reportFirstEvent=cms.untracked.int32(start),
            reportEventInterval=cms.untracked.int32(interval),
            reportToFileAtPostEvent=cms.untracked.string("%s" % (eventFormat)),
            reportToFileAtPostEndJob=cms.untracked.string("%s" % (jobFormat)))
        self.addedObjects.append(("Setup JeProf Service for heap profiling", "JeProfService"))
424 
def addMaxEvents(self):
    """Here we decide how many evts will be processed"""
    self.process.maxEvents.input = int(self._options.number)
    numberOut = self._options.number_out
    if numberOut:
        self.process.maxEvents.output = int(numberOut)
    self.addedObjects.append(("", "maxEvents"))
431 
def addSource(self):
    """Here the source is built. Priority: file, generator"""
    self.addedObjects.append(("Input source","source"))

    # helper: fill self.process.source from --filein / --secondfilein entries;
    # entries may be plain file names, 'filelist:<path>' or 'dbs:/das:<dataset>'
    def filesFromOption(self):
        for entry in self._options.filein.split(','):
            print("entry",entry)
            if entry.startswith("filelist:"):
                filesFromList(entry[9:],self.process.source)
            elif entry.startswith("dbs:") or entry.startswith("das:"):
                filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
            else:
                self.process.source.fileNames.append(self._options.dirin+entry)
        if self._options.secondfilein:
            if not hasattr(self.process.source,"secondaryFileNames"):
                raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
            for entry in self._options.secondfilein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                else:
                    self.process.source.secondaryFileNames.append(self._options.dirin+entry)

    # pick the source type from --filetype
    if self._options.filein or self._options.dasquery:
        if self._options.filetype == "EDM":
            self.process.source=cms.Source("PoolSource",
                                           fileNames = cms.untracked.vstring(),
                                           secondaryFileNames= cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "DAT":
            self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "LHE":
            self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
            if self._options.filein.startswith("lhe:"):
                #list the article directory automatically
                args=self._options.filein.split(':')
                article=args[1]
                print('LHE input from article ',article)
                location='/store/lhe/'
                import os
                # shells out to list the article's files (trusted input assumed)
                textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                for line in textOfFiles:
                    for fileName in [x for x in line.split() if '.lhe' in x]:
                        self.process.source.fileNames.append(location+article+'/'+fileName)
                    #check first if list of LHE files is loaded (not empty)
                    if len(line)<2:
                        print('Issue to load LHE files, please check and try again.')
                        sys.exit(-1)
                #Additional check to protect empty fileNames in process.source
                if len(self.process.source.fileNames)==0:
                    print('Issue with empty filename, but can pass line check')
                    sys.exit(-1)
                # optional third field: number of events to skip
                if len(args)>2:
                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            else:
                filesFromOption(self)

        elif self._options.filetype == "DQM":
            self.process.source=cms.Source("DQMRootSource",
                                           fileNames = cms.untracked.vstring())
            filesFromOption(self)

        elif self._options.filetype == "DQMDAQ":
            # FIXME: how to configure it if there are no input files specified?
            self.process.source=cms.Source("DQMStreamerReader")


        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

    # a DAS query replaces the source wholesale
    if self._options.dasquery!='':
        self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
        filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")


    # when running GEN on non-LHE input, add a drop of LHEXMLStringProduct to
    # the input commands
    if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
        if self._options.inputCommands:
            self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
        else:
            self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

    if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
        if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
        for command in self._options.inputCommands.split(','):
            # remove whitespace around the keep/drop statements
            command = command.strip()
            if command=='': continue
            self.process.source.inputCommands.append(command)
        if not self._options.dropDescendant:
            self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

    if self._options.lumiToProcess:
        import FWCore.PythonUtilities.LumiList as LumiList
        self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

    # fall back to an empty source for generator-only jobs
    if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
        if self.process.source is None:
            self.process.source=cms.Source("EmptySource")

    # modify source in case of run-dependent MC
    self.runsAndWeights=None
    if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
        if self._options.runsAndWeightsForMC:
            # NOTE(review): eval() of a command-line option value; trusted input assumed
            self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                __import__(RunsAndWeights[self._options.runsScenarioForMC])
                self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
            else:
                self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

    if self.runsAndWeights:
        import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
        # NOTE(review): a line is missing from this view between the import
        # and the appends below (gap in the source listing) — confirm against VCS
        self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
        self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

    # modify source in case of run-dependent MC (Run-3 method)
    # NOTE(review): a line is missing from this view here (gap in the source
    # listing) — confirm against VCS
    if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
        if self._options.runsAndWeightsForMCIntegerWeights:
            # NOTE(review): eval() of a command-line option value; trusted input assumed
            self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
            else:
                self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

    if self.runsAndWeightsInt:
        if not self._options.relval:
            raise Exception("--relval option required when using --runsAndWeightsInt")
        if 'DATAMIX' in self._options.step:
            from SimGeneral.Configuration.LumiToRun import lumi_to_run
            total_events, events_per_job = self._options.relval.split(',')
            lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
            self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

    return
583 
def addOutput(self):
    """ Add output module to the process """
    result=""
    # first branch: the new --output convention (a list of dicts); otherwise
    # fall through to the legacy --eventcontent/--datatier pairs below
    if self._options.outputDefinition:
        if self._options.datatier:
            print("--datatier & --eventcontent options ignored")

        #new output convention with a list of dict
        # NOTE(review): eval() of the --output option string; trusted input assumed
        outList = eval(self._options.outputDefinition)
        for (id,outDefDict) in enumerate(outList):
            outDefDictStr=outDefDict.__str__()
            if not isinstance(outDefDict,dict):
                raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
            #requires option: tier
            theTier=anyOf(['t','tier','dataTier'],outDefDict)
            #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
            theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
            theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
            theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
            theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
            theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
            # module label has a particular role
            # try progressively more specific labels until one is free
            if not theModuleLabel:
                tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                          ]
                for name in tryNames:
                    if not hasattr(self.process,name):
                        theModuleLabel=name
                        break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
            # first output keeps the nominal file name; later ones get _in<TIER>
            if id==0:
                defaultFileName=self._options.outfile_name
            else:
                defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

            theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
            if not theFileName.endswith('.root'):
                theFileName+='.root'

            # anyOf() pops recognised keys, so anything left over is unknown
            if len(outDefDict):
                raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
            if theStreamType=='DQMIO': theStreamType='DQM'
            if theStreamType=='ALL':
                theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
            else:
                theEventContent = getattr(self.process, theStreamType+"EventContent")


            addAlCaSelects=False
            if theStreamType=='ALCARECO' and not theFilterName:
                theFilterName='StreamALCACombined'
                addAlCaSelects=True

            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent.clone(),
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(
                                          dataTier = cms.untracked.string(theTier),
                                          filterName = cms.untracked.string(theFilterName))
                                      )
            if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if not theSelectEvent and hasattr(self.process,'filtering_step'):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
            if theSelectEvent:
                output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

            if addAlCaSelects:
                if not hasattr(output,'SelectEvents'):
                    output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                for alca in self.AlCaPaths:
                    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


            if hasattr(self.process,theModuleLabel):
                raise Exception("the current process already has a module "+theModuleLabel+" defined")
            #print "creating output module ",theModuleLabel
            setattr(self.process,theModuleLabel,output)
            outputModule=getattr(self.process,theModuleLabel)
            setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,theModuleLabel+'_step')
            self.schedule.append(path)

            # patch dumpPython so the dump references the event content by
            # name instead of inlining every output command
            if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
            if theExtraOutputCommands:
                if not isinstance(theExtraOutputCommands,list):
                    raise Exception("extra ouput command in --option must be a list of strings")
                if hasattr(self.process,theStreamType+"EventContent"):
                    self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                else:
                    outputModule.outputCommands.extend(theExtraOutputCommands)

            result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


        return result

    # legacy convention: parallel comma-separated --eventcontent / --datatier lists
    streamTypes=self._options.eventcontent.split(',')
    tiers=self._options.datatier.split(',')
    if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
        raise Exception("number of event content arguments does not match number of datatier arguments")

    # if the only step is alca we don't need to put in an output
    if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
        return "\n"

    for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
        if streamType=='': continue
        if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
        if streamType=='DQMIO': streamType='DQM'
        eventContent=streamType
        # NANOEDM streams reuse the corresponding NANO event content
        if streamType == "NANOEDMAOD" :
            eventContent = "NANOAOD"
        elif streamType == "NANOEDMAODSIM" :
            eventContent = "NANOAODSIM"
        theEventContent = getattr(self.process, eventContent+"EventContent")
        if i==0:
            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
        else:
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername
        CppType='PoolOutputModule'
        if self._options.timeoutOutput:
            CppType='TimeoutPoolOutputModule'
        if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
        if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
        output = cms.OutputModule(CppType,
                                  theEventContent,
                                  fileName = cms.untracked.string(theFileName),
                                  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                               filterName = cms.untracked.string(theFilterName)
                                                               )
                                  )
        if hasattr(self.process,"generation_step") and streamType!='LHE':
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
        if hasattr(self.process,"filtering_step"):
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

        if streamType=='ALCARECO':
            output.dataset.filterName = cms.untracked.string('StreamALCACombined')

        if "MINIAOD" in streamType:
            from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
            # NOTE(review): the call using miniAOD_customizeOutput is missing
            # from this view (gap in the source listing) — confirm against VCS

        outputModuleName=streamType+'output'
        setattr(self.process,outputModuleName,output)
        outputModule=getattr(self.process,outputModuleName)
        setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
        path=getattr(self.process,outputModuleName+'_step')
        self.schedule.append(path)

        if self._options.outputCommands and streamType!='DQM':
            for evct in self._options.outputCommands.split(','):
                if not evct: continue
                self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

        # patch dumpPython so the dump references the event content by name
        # instead of inlining every output command
        if not self._options.inlineEventContent:
            tmpstreamType=streamType
            if "NANOEDM" in tmpstreamType :
                tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
            def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                return label
            outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

        result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

    return result
764 
def addStandardSequences(self):
    """
    Add selected standard sequences to the process
    """
    # ---- pile-up -------------------------------------------------------
    # load the pile up file
    if self._options.pileup:
        # the spec may be "<scenario>,<python-dict-of-overrides>"
        pileupSpec=self._options.pileup.split(',')[0]

        # Does the requested pile-up scenario exist?
        from Configuration.StandardSequences.Mixing import Mixing,defineMixing
        if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
            message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
            raise Exception(message)

        # Put mixing parameters in a dictionary
        if '.' in pileupSpec:
            # a dotted name is taken verbatim as the cff to load
            mixingDict={'file':pileupSpec}
        elif pileupSpec.startswith('file:'):
            # explicit local file
            mixingDict={'file':pileupSpec[5:]}
        else:
            import copy
            mixingDict=copy.copy(Mixing[pileupSpec])
        if len(self._options.pileup.split(','))>1:
            # everything after the first comma is eval'd as a dict of overrides
            mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

        # Load the pu cfg file corresponding to the requested pu scenario
        if 'file:' in pileupSpec:
            #the file is local
            self.process.load(mixingDict['file'])
            print("inlining mixing module configuration")
            self._options.inlineObjets+=',mix'
        else:
            self.loadAndRemember(mixingDict['file'])

        mixingDict.pop('file')
        if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
            if self._options.pileup_input:
                # resolve the pile-up input file list from DAS, a file list, or a comma-separated string
                if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                    mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                elif self._options.pileup_input.startswith("filelist:"):
                    mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                else:
                    mixingDict['F']=self._options.pileup_input.split(',')
            # defineMixing consumes the entries it understands and returns
            # python commands to be executed on the process
            specialization=defineMixing(mixingDict)
            for command in specialization:
                self.executeAndRemember(command)
            if len(mixingDict)!=0:
                # leftover keys mean the user passed options nobody consumed
                raise Exception('unused mixing specification: '+mixingDict.keys().__str__())


    # ---- geometry ------------------------------------------------------
    # load the geometry file
    try:
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                # NOTE(review): the dump viewed here is missing one original
                # line in this branch (the load of the sim geometry cff)
                if self.geometryDBLabel:
                    # point the DB geometry sources at the requested label
                    self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
                    self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))

    except ImportError:
        print("Geometry option",self._options.geometry,"unknown.")
        raise

    # ---- magnetic field ------------------------------------------------
    if len(self.stepMap):
        self.loadAndRemember(self.magFieldCFF)

    # ---- per-step preparation ------------------------------------------
    for stepName in self.stepKeys:
        stepSpec = self.stepMap[stepName]
        print("Step:", stepName,"Spec:",stepSpec)
        if stepName.startswith('re'):
            # "reX" steps re-run step X; keep the original products on input
            # unless the user asked to drop them
            if stepName[2:] not in self._options.donotDropOnInput:
                self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
            stepName=stepName[2:]
        # dispatch to prepare_<STEP>() with the spec in normalised form
        if stepSpec=="":
            getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
        elif isinstance(stepSpec, list):
            getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
        elif isinstance(stepSpec, tuple):
            getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
        else:
            raise ValueError("Invalid step definition")

    # ---- random-number state restoration -------------------------------
    if self._options.restoreRNDSeeds!=False:
        #it is either True, or a process name
        if self._options.restoreRNDSeeds==True:
            self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
        else:
            self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
        if self._options.inputEventContent or self._options.inputCommands:
            # make sure the stored engine state is kept on input
            if self._options.inputCommands:
                self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
            else:
                self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
860 
861 
def completeInputCommand(self):
    """Extend process.source.inputCommands from --inputEventContent.

    For every event-content name in the comma-separated
    ``--inputEventContent`` option, the ``outputCommands`` and
    ``inputCommands`` of the corresponding ``<name>EventContent`` PSet are
    appended to ``process.source.inputCommands``.  Duplicate ``'drop *'``
    statements are then reduced to the first one, and dropping of
    descendants is disabled unless ``--dropDescendant`` was given.
    """
    if self._options.inputEventContent:
        import copy
        def dropSecondDropStar(iec):
            # Keep only the FIRST 'drop *' and delete any later duplicates,
            # in place.  Position matters: a 'drop *' appearing later in the
            # list would cancel every 'keep' statement before it.  (The old
            # implementation removed items while iterating and used
            # list.remove(), which deletes the first match — so with
            # duplicates it could keep a *later* 'drop *' instead.)
            seenFirst = False
            index = 0
            while index < len(iec):
                if iec[index] == 'drop *':
                    if seenFirst:
                        del iec[index]
                        continue  # same index now holds the next element
                    seenFirst = True
                index += 1

        if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
        for evct in self._options.inputEventContent.split(','):
            if evct=='': continue
            theEventContent = getattr(self.process, evct+"EventContent")
            # copy so that later mutations of the source list do not leak
            # back into the shared event-content PSets
            if hasattr(theEventContent,'outputCommands'):
                self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
            if hasattr(theEventContent,'inputCommands'):
                self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

        dropSecondDropStar(self.process.source.inputCommands)

        if not self._options.dropDescendant:
            self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

    return
891 
def addConditions(self):
    """Add conditions to the process"""
    # nothing to do when no --conditions were given
    if not self._options.conditions: return

    # legacy spelling: strip the obsolete "FrontierConditions_GlobalTag," prefix
    if 'FrontierConditions_GlobalTag' in self._options.conditions:
        print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
        self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')

    # NOTE(review): the listing viewed here is missing one original line at
    # this point (presumably the load of the conditions default cff)
    # configure the GlobalTag on the live process ...
    from Configuration.AlCa.GlobalTag import GlobalTag
    self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
    # ... and record equivalent python so the dumped config reproduces it
    self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
    self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
906 
def addCustomise(self,unsch=0):
    """Include the customise code.

    Parses the --customise (unsch==0) or --customise_unsch (unsch!=0)
    options of the form "package.function" (or "package", defaulting to
    the function name "customise"), imports each package, applies every
    requested function to self.process, and returns the python snippet to
    be appended to the dumped configuration.

    Raises Exception on a malformed spec, a duplicated function name, or a
    missing function in the target package.
    """

    # flatten the (possibly repeated, comma-separated) option values
    custOpt=[]
    if unsch==0:
        for c in self._options.customisation_file:
            custOpt.extend(c.split(","))
    else:
        for c in self._options.customisation_file_unsch:
            custOpt.extend(c.split(","))

    # map file/package -> list of customisation function names
    custMap=DictTypes.SortedKeysDict()
    for opt in custOpt:
        if opt=='': continue
        if opt.count('.')>1:
            raise Exception("more than . in the specification:"+opt)
        fileName=opt.split('.')[0]
        if opt.count('.')==0: rest='customise'
        else:
            rest=opt.split('.')[1]
            if rest=='py': rest='customise' #catch the case of --customise file.py

        if fileName in custMap:
            custMap[fileName].extend(rest.split('+'))
        else:
            custMap[fileName]=rest.split('+')

    if len(custMap)==0:
        final_snippet='\n'
    else:
        final_snippet='\n# customisation of the process.\n'

    # a function may be applied only once over all packages
    allFcn=[]
    for opt in custMap:
        allFcn.extend(custMap[opt])
    for fcn in allFcn:
        if allFcn.count(fcn)!=1:
            raise Exception("cannot specify twice "+fcn+" as a customisation method")

    for f in custMap:
        # let python search for that package and do syntax checking at the same time
        packageName = f.replace(".py","").replace("/",".")
        __import__(packageName)
        package = sys.modules[packageName]

        # now ask the package for its definition and pick .py instead of .pyc
        customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

        final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
        if self._options.inline_custom:
            # inline the whole customisation source into the dumped config
            # (fix: the Python-2-only file() builtin was used here; open()
            # with a context manager is the Python 3 equivalent and also
            # guarantees the handle is closed)
            with open(customiseFile,'r') as customiseSource:
                for line in customiseSource:
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
        else:
            final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
        for fcn in custMap[f]:
            print("customising the process with",fcn,"from",f)
            if not hasattr(package,fcn):
                #bound to fail at run time
                raise Exception("config "+f+" has no function "+fcn)
            #execute the command
            self.process=getattr(package,fcn)(self.process)
            #and print it in the configuration
            final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
            final_snippet += "\nprocess = %s(process)\n"%(fcn,)

    if len(custMap)!=0:
        final_snippet += '\n# End of customisation functions\n'


    return final_snippet
979 
def addCustomiseCmdLine(self):
    """Apply --customise_commands to the process.

    Each command (separated by a literal ``\\n`` in the option string) is
    stripped of leading whitespace, executed on the process via
    executeAndRemember, and echoed into the returned snippet for the
    dumped configuration.
    """
    # note: the dead Python-2-era "import string" was removed here
    final_snippet='\n# Customisation from command line\n'
    if self._options.customise_commands:
        for com in self._options.customise_commands.split('\\n'):
            com=com.lstrip()
            self.executeAndRemember(com)
            final_snippet +='\n'+com

    return final_snippet
990 
991  #----------------------------------------------------------------------------
992  # here the methods to define the python includes for each step or
993  # conditions
994  #----------------------------------------------------------------------------
def define_Configs(self):
    """Set the default cff file and default sequence name for every step,
    then specialise them for data/MC, the chosen scenario (pp, cosmics,
    nocoll, HeavyIons), fastsim, geometry and magnetic-field options."""
    # basic services and particle table
    if len(self.stepMap):
        self.loadAndRemember('Configuration/StandardSequences/Services_cff')
    if self._options.particleTable not in defaultOptions.particleTableList:
        print('Invalid particle table provided. Options are:')
        print(defaultOptions.particleTable)
        sys.exit(-1)
    else:
        if len(self.stepMap):
            self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

    self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

    # ---- default cff file per step (data-taking defaults) --------------
    self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
    self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
    self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
    self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
    self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
    self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
    self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
    self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
    self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
    if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
    self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
    self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
    self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
    self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
    self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
    self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
    self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
    self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
    self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
    self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
    self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
    self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
    self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
    self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
    self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
    self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
    self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
    # premixing workflow needs the data-mixer variants of DIGI/DIGI2RAW/L1
    if "DATAMIX" in self.stepMap.keys():
        self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

    # ---- default sequence name per step --------------------------------
    self.ALCADefaultSeq=None
    self.LHEDefaultSeq='externalLHEProducer'
    self.GENDefaultSeq='pgen'
    self.SIMDefaultSeq='psim'
    self.DIGIDefaultSeq='pdigi'
    self.DIGI2RAWDefaultSeq='DigiToRaw'
    self.HLTDefaultSeq='GRun'
    self.L1DefaultSeq=None
    self.RAW2DIGIDefaultSeq='RawToDigi'
    self.L1RecoDefaultSeq='L1Reco'
    self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
    # full reconstruction only when RAW2DIGI is also run (or in fastsim)
    if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
        self.RECODefaultSeq='reconstruction'
    else:
        self.RECODefaultSeq='reconstruction_fromRECO'
    self.RECOSIMDefaultSeq='recosim'
    self.L1HwValDefaultSeq='L1HwVal'
    self.DQMDefaultSeq='DQMOffline'
    self.ENDJOBDefaultSeq='endOfProcess'
    self.REPACKDefaultSeq='DigiToRawRepack'
    self.PATDefaultSeq='miniAOD'
    self.PATGENDefaultSeq='miniGEN'
    #TODO: Check based of file input
    self.NANOGENDefaultSeq='nanogenSequence'
    self.NANODefaultSeq='nanoSequence'
    self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
    # default beamspot key (VtxSmearedDefaultKey is defined at module scope)
    if not self._options.beamspot:
        self._options.beamspot=VtxSmearedDefaultKey

    # if its MC then change the raw2digi
    if self._options.isMC==True:
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
        self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        self.NANODefaultSeq='nanoSequenceMC'
    else:
        # no beamspot smearing on real data
        self._options.beamspot = None

    #patch for gen, due to backward incompatibility
    if 'reGEN' in self.stepMap:
        self.GENDefaultSeq='fixGenInfo'

    # ---- cosmics scenario ----------------------------------------------
    if self._options.scenario=='cosmics':
        self._options.pileup='Cosmics'
        self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
        self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
        if self._options.isMC==True:
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
        self.RECODefaultSeq='reconstructionCosmics'
        self.DQMDefaultSeq='DQMOfflineCosmics'

    # ---- heavy-ion scenario --------------------------------------------
    if self._options.scenario=='HeavyIons':
        # VtxSmearedHIDefaultKey is defined at module scope
        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedHIDefaultKey
        self.HLTDefaultSeq = 'HIon'
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
        self.VALIDATIONDefaultSeq=''
        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
        self.RECODefaultSeq='reconstructionHeavyIons'
        self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
        self.DQMDefaultSeq='DQMOfflineHeavyIons'
        self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
        if self._options.isMC==True:
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


    self.USERDefaultSeq='user'
    self.USERDefaultCFF=None
    # the magnetic field
    self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
    self.magFieldCFF = self.magFieldCFF.replace("__",'_')

    # the geometry
    self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
    simGeometry=''
    if self._options.fast:
        # fastsim ships its own geometry configurations
        if 'start' in self._options.conditions.lower():
            self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
        else:
            self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
    else:
        def inGeometryKeys(opt):
            # translate a named geometry key via GeometryConf, pass through otherwise
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]
            else:
                return opt

        geoms=self._options.geometry.split(',')
        if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
        if len(geoms)==2:
            #may specify the reco geometry
            if '/' in geoms[1] or '_cff' in geoms[1]:
                self.GeometryCFF=geoms[1]
            else:
                self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            # sim geometry taken from the conditions DB under the given label
            self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel=geoms[0][3:]
            print("with DB:")
        else:
            if '/' in geoms[0] or '_cff' in geoms[0]:
                # explicit cff path given for the sim geometry
                self.SimGeometryCFF=geoms[0]
            else:
                simGeometry=geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                else:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

    # synchronize the geometry configuration and the FullSimulation sequence to be used
    if simGeometry not in defaultOptions.geometryExtendedOptions:
        self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

    if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
        self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
        self._options.beamspot='NoSmear'

    # fastsim requires some changes to the default cff files and sequences
    if self._options.fast:
        self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
        self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
        self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
        self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
        self.NANODefaultSeq = 'nanoSequenceFS'
        self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"

    # Mixing
    if self._options.pileup=='default':
        from Configuration.StandardSequences.Mixing import MixingDefaultKey
        self._options.pileup=MixingDefaultKey


    #not driven by a default cff anymore
    if self._options.isData:
        self._options.pileup=None
1204 
1205 
1208  # for alca, skims, etc
def addExtraStream(self, name, stream, workflow='full'):
    """Create a PoolOutputModule for an extra (ALCA/skim) FilteredStream.

    name     -- attribute name for the output module on the process
    stream   -- a cms.FilteredStream-like object (selectEvents, paths,
                content, dataTier, name)
    workflow -- 'full' (schedule paths + output), 'producers' (paths only)
                or 'output' (output only, selecting on a previous process)

    Returns the configured output module.
    """
    # define output module and go from there
    output = cms.OutputModule("PoolOutputModule")
    if len(stream.selectEvents.parameters_()) != 0:
        # the stream carries its own SelectEvents PSet
        output.SelectEvents = stream.selectEvents
    else:
        # build SelectEvents from the stream's path label(s)
        output.SelectEvents = cms.untracked.PSet()
        output.SelectEvents.SelectEvents = cms.vstring()
        if isinstance(stream.paths, tuple):
            for path in stream.paths:
                output.SelectEvents.SelectEvents.append(path.label())
        else:
            output.SelectEvents.SelectEvents.append(stream.paths.label())



    if isinstance(stream.content, str):
        # named event content: copy its parameters onto the output module
        evtPset = getattr(self.process, stream.content)
        for p in evtPset.parameters_():
            setattr(output, p, getattr(evtPset, p))
        if not self._options.inlineEventContent:
            # dump a reference to the event content instead of inlining it
            def doNotInlineEventContent(instance, label="process."+stream.content+".outputCommands"):
                return label
            output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
    else:
        output.outputCommands = stream.content


    output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

    output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                         filterName = cms.untracked.string(stream.name))

    if self._options.filtername:
        output.dataset.filterName = cms.untracked.string(self._options.filtername+"_"+stream.name)

    #add an automatic flushing to limit memory consumption
    output.eventAutoFlushCompressedSize = cms.untracked.int32(5*1024*1024)

    # fix: this used to read `workflow in ("producers,full")`, i.e. a
    # substring test against the single string "producers,full" — it
    # happened to work for the known values but was fragile; use a tuple
    if workflow in ("producers", "full"):
        if isinstance(stream.paths, tuple):
            for path in stream.paths:
                self.schedule.append(path)
        else:
            self.schedule.append(stream.paths)


    # in case of relvals we don't want to have additional outputs
    if (not self._options.relval) and workflow in ("full", "output"):
        self.additionalOutputs[name] = output
        setattr(self.process, name, output)

    if workflow == 'output':
        # adjust the select events to the proper trigger results from previous process
        filterList = output.SelectEvents.SelectEvents
        for i, flt in enumerate(filterList):  # renamed: 'filter' shadowed the builtin
            filterList[i] = flt+":"+self._options.triggerResultsProcess

    return output
1268 
 1269  #----------------------------------------------------------------------------
 1270  # here the methods to create the steps. Of course we are doing magic here ;)
 1271  # prepare_STEPNAME modifies self.process and whatever else is needed.
 1272  #----------------------------------------------------------------------------
1273 
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
    """Resolve a step spec into (loaded-cff, sequence, cff-name) and load it.

    Accepted forms of stepSpec:
      "seq"            -> sequence "seq" from defaultCFF
      "dir/sub/cff"    -> defaultSEQ (or the spec itself) from that cff
      "cff.a+b"        -> sequence "a+b" from "cff"

    Raises Exception for anything with more than one '.'.
    """
    _dotsplit = stepSpec.split('.')
    if ( len(_dotsplit)==1 ):
        if '/' in _dotsplit[0]:
            # a bare cff path: use the default sequence name if one exists
            _sequence = defaultSEQ if defaultSEQ else stepSpec
            _cff = _dotsplit[0]
        else:
            # a bare sequence name: take it from the default cff
            _sequence = stepSpec
            _cff = defaultCFF
    elif ( len(_dotsplit)==2 ):
        _cff,_sequence = _dotsplit
    else:
        print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
        print(stepSpec,"not recognized")
        # fix: this was a bare `raise` with no active exception, which
        # itself raises "RuntimeError: No active exception to re-raise"
        raise Exception("invalid step specification: "+stepSpec)
    l=self.loadAndRemember(_cff)
    return l,_sequence,_cff
1291 
def scheduleSequence(self,seq,prefix,what='Path'):
    """Wrap sequence spec *seq* into cms Path/EndPath object(s) named from
    *prefix* and append them to the schedule.

    '*'-joined parts are merged into one path (Tasks are associated,
    sequences concatenated); '+'-joined parts each get their own numbered
    path; a plain name becomes a single path (honouring
    nextScheduleIsConditional).
    """
    pathType = getattr(cms, what)

    if '*' in seq:
        # one single path containing every '*'-separated member
        members = seq.split('*')
        setattr(self.process, prefix, pathType(getattr(self.process, members[0])))
        combined = getattr(self.process, prefix)
        for memberName in members[1:]:
            member = getattr(self.process, memberName)
            if isinstance(member, cms.Task):
                combined.associate(member)
            else:
                combined += member
        self.schedule.append(getattr(self.process, prefix))
        return

    if '+' in seq:
        # one path per '+'-separated member, suffixed with its index
        for idx, memberName in enumerate(seq.split('+')):
            pathName = prefix + '%d' % (idx)
            setattr(self.process, pathName, pathType(getattr(self.process, memberName)))
            self.schedule.append(getattr(self.process, pathName))
        return

    # plain single sequence
    if self.nextScheduleIsConditional:
        self.conditionalPaths.append(prefix)
    setattr(self.process, prefix, pathType(getattr(self.process, seq)))
    self.schedule.append(getattr(self.process, prefix))
    return
1320 
def scheduleSequenceAtEnd(self,seq,prefix):
    """Schedule *seq* like scheduleSequence, but as an EndPath."""
    self.scheduleSequence(seq, prefix, what='EndPath')
    return
1324 
def prepare_ALCAPRODUCER(self, stepSpec = None):
    """Schedule the ALCARECO producer paths only (no output modules)."""
    self.prepare_ALCA(stepSpec, workflow = "producers")
1327 
def prepare_ALCAOUTPUT(self, stepSpec = None):
    """Create the ALCARECO output modules only (no producer paths)."""
    self.prepare_ALCA(stepSpec, workflow = "output")
1330 
def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
    """ Enrich the process with alca streams """
    alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

    MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
    # decide which ALCA paths to use
    alcaList = sequence.split("+")
    for alca in alcaList:
        if (len(alca)>MAXLEN):
            raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

    maxLevel=0
    from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
    # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
    self.expandMapping(alcaList,autoAlca)
    self.AlCaPaths=[]
    # scan every FilteredStream defined in the alca config; requested
    # entries are removed from alcaList as they are handled, so anything
    # left at the end is an unknown alca name
    for name in alcaConfig.__dict__:
        alcastream = getattr(alcaConfig,name)
        shortName = name.replace('ALCARECOStream','')
        if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
            # some alcas cannot run with concurrent lumi blocks
            if shortName in AlCaNoConcurrentLumis:
                print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                self._options.nConcurrentLumis = "1"
                self._options.nConcurrentIOVs = "1"
            output = self.addExtraStream(name,alcastream, workflow = workflow)
            self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
            self.AlCaPaths.append(shortName)
            if 'DQM' in alcaList:
                # make sure DQM MEs are kept by this stream's output
                if not self._options.inlineEventContent and hasattr(self.process,name):
                    self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                else:
                    output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

            #rename the HLT process name in the alca modules
            if self._options.hltProcess or 'HLT' in self.stepMap:
                if isinstance(alcastream.paths,tuple):
                    for path in alcastream.paths:
                        self.renameHLTprocessInSequence(path.label())
                else:
                    self.renameHLTprocessInSequence(alcastream.paths.label())

            # remove every occurrence of this alca from the request list
            for i in range(alcaList.count(shortName)):
                alcaList.remove(shortName)

        # DQM needs a special handling
        elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
            path = getattr(alcaConfig,name)
            self.schedule.append(path)
            alcaList.remove('DQM')

        if isinstance(alcastream,cms.Path):
            #black list the alca path so that they do not appear in the cfg
            self.blacklist_paths.append(alcastream)


    # anything still in alcaList was not matched by a known stream
    if len(alcaList) != 0:
        available=[]
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            if isinstance(alcastream,cms.FilteredStream):
                available.append(name.replace('ALCARECOStream',''))
        print("The following alcas could not be found "+str(alcaList))
        print("available ",available)
        #print "verify your configuration, ignoring for now"
        raise Exception("The following alcas could not be found "+str(alcaList))
1396 
def prepare_LHE(self, stepSpec = None):
    """Enrich the schedule with the external-LHE production step.

    Imports the generator fragment named by --evt_type, loads it into the
    process, and schedules the LHE producer (*stepSpec*) in a dedicated
    path that is excluded from the normal path listing.
    """
    # normalise the fragment name into an importable module path
    fragmentModule = self._options.evt_type.replace('.py','').replace('.','_').replace('python/','').replace('/','.')
    print("Loading lhe fragment from",fragmentModule)
    __import__(fragmentModule)
    self.process.load(fragmentModule)

    # the producer must be dumped inline in the final configuration
    self._options.inlineObjets += ',' + stepSpec

    # number of LHE events to produce comes from the command line
    getattr(self.process, stepSpec).nEvents = int(self._options.number)

    # schedule the producer in its own path
    self.process.lhe_step = cms.Path(getattr(self.process, stepSpec))
    self.excludedPaths.append("lhe_step")
    self.schedule.append(self.process.lhe_step)
1413 
def prepare_GEN(self, stepSpec = None):
    """ load the fragment of generator configuration

    Imports the --evt_type fragment (if any), exposes its modules to the
    dumped configuration, schedules the generation path, and — unless this
    is a reGEN or smear-only step — adds the generator-filter summary at
    the end of the job.
    """
    loadFailure=False
    #remove trailing .py
    #support old style .cfi by changing into something.cfi into something_cfi
    #remove python/ from the name
    loadFragment = self._options.evt_type.replace('.py','').replace('.','_').replace('python/','')
    #standard location of fragments
    if not '/' in loadFragment:
        loadFragment='Configuration.Generator.'+loadFragment
    else:
        loadFragment=loadFragment.replace('/','.')
    try:
        print("Loading generator fragment from",loadFragment)
        __import__(loadFragment)
    except Exception:
        # fix: was a bare `except:` which would also swallow
        # KeyboardInterrupt/SystemExit; any failure importing the fragment
        # (missing module or error in its top-level code) means we must run
        # from input files instead
        loadFailure=True
        #if self.process.source and self.process.source.type_()=='EmptySource':
        if not (self._options.filein or self._options.dasquery):
            raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

    if not loadFailure:
        from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators

        generatorModule=sys.modules[loadFragment]
        genModules=generatorModule.__dict__
        #remove lhe producer module since this should have been
        #imported instead in the LHE step
        if self.LHEDefaultSeq in genModules:
            del genModules[self.LHEDefaultSeq]

        if self._options.hideGen:
            self.loadAndRemember(loadFragment)
        else:
            self.process.load(loadFragment)
            # expose the objects from that fragment to the configuration
            import FWCore.ParameterSet.Modules as cmstypes
            for name in genModules:
                theObject = getattr(generatorModule,name)
                if isinstance(theObject, cmstypes._Module):
                    self._options.inlineObjets=name+','+self._options.inlineObjets
                    # some generators cannot run with concurrent lumi blocks
                    if theObject.type_() in noConcurrentLumiGenerators:
                        print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
                        self._options.nConcurrentLumis = "1"
                        self._options.nConcurrentIOVs = "1"
                elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                    self._options.inlineObjets+=','+name

        # remember the production filter sequence for later path filtering
        if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly':
            if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                self.productionFilterSequence = 'ProductionFilterSequence'
            elif 'generator' in genModules:
                self.productionFilterSequence = 'generator'

    """ Enrich the schedule with the rest of the generation step """
    _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)

    # vertex smearing according to the chosen beamspot
    # (note: a redundant `if True:` wrapper was removed here)
    try:
        from Configuration.StandardSequences.VtxSmeared import VtxSmeared
        cffToBeLoaded=VtxSmeared[self._options.beamspot]
        self.loadAndRemember(cffToBeLoaded)
    except ImportError:
        raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

    if self._options.scenario == 'HeavyIons':
        if self._options.pileup=='HiMixGEN':
            self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
        elif self._options.pileup=='HiMixEmbGEN':
            self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
        else:
            self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

    self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
    self.schedule.append(self.process.generation_step)

    #register to the genstepfilter the name of the path (static right now, but might evolve)
    self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

    if 'reGEN' in self.stepMap or stepSpec == 'pgen_smear':
        #stop here
        return

    """ Enrich the schedule with the summary of the filter step """
    #the gen filter in the endpath
    self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
    self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
    return
1502 
1503  def prepare_SIM(self, stepSpec = None):
1504  """ Enrich the schedule with the simulation step"""
1505  _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
1506  if not self._options.fast:
1507  if self._options.gflash==True:
1508  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1509 
1510  if self._options.magField=='0T':
1511  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1512  else:
1513  if self._options.magField=='0T':
1514  self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1515 
1516  self.scheduleSequence(_simSeq,'simulation_step')
1517  return
1518 
1519  def prepare_DIGI(self, stepSpec = None):
1520  """ Enrich the schedule with the digitisation step"""
1521  _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)
1522 
1523  if self._options.gflash==True:
1524  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1525 
1526  if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
1527  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1528 
1529  if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1530  if self._options.inputEventContent=='':
1531  self._options.inputEventContent='REGEN'
1532  else:
1533  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1534 
1535 
1536  self.scheduleSequence(_digiSeq,'digitisation_step')
1537  return
1538 
    def prepare_CFWRITER(self, stepSpec = None):
        """ Enrich the schedule with the crossing frame writer step"""
        # NOTE(review): no cff load for the crossing-frame writer is visible
        # here — confirm 'pcfw' is defined by a configuration loaded upstream
        self.scheduleSequence('pcfw','cfwriter_step')
        return
1544 
    def prepare_DATAMIX(self, stepSpec = None):
        """ Enrich the schedule with the data mixing step"""
        # NOTE(review): no cff load for data mixing is visible here — confirm
        # 'pdatamix' is defined by a configuration loaded upstream
        self.scheduleSequence('pdatamix','datamixing_step')

        if self._options.pileup_input:
            theFiles=''
            # resolve the --pileup_input specification into a concrete file list:
            # a DAS/DBS dataset query, a file list on disk, or an explicit
            # comma-separated list of files
            if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
            elif self._options.pileup_input.startswith("filelist:"):
                theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
            else:
                theFiles=self._options.pileup_input.split(',')
            #print theFiles
            # record the command so it also appears in the dumped configuration
            self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )

        return
1562 
1563  def prepare_DIGI2RAW(self, stepSpec = None):
1564  _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1565  self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1566  return
1567 
1568  def prepare_REPACK(self, stepSpec = None):
1569  _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1570  self.scheduleSequence(_repackSeq,'digi2repack_step')
1571  return
1572 
1573  def prepare_L1(self, stepSpec = None):
1574  """ Enrich the schedule with the L1 simulation step"""
1575  assert(stepSpec == None)
1576  self.loadAndRemember(self.L1EMDefaultCFF)
1577  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1578  return
1579 
1580  def prepare_L1REPACK(self, stepSpec = None):
1581  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1582  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1583  if stepSpec in supported:
1584  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1585  if self._options.scenario == 'HeavyIons':
1586  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1587  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1588  else:
1589  print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1590  raise Exception('unsupported feature')
1591 
    def prepare_HLT(self, stepSpec = None):
        """ Enrich the schedule with the HLT simulation step"""
        if not stepSpec:
            print("no specification of the hlt menu has been given, should never happen")
            raise Exception('no HLT specifications provided')

        if '@' in stepSpec:
            # case where HLT:@something was provided
            from Configuration.HLT.autoHLT import autoHLT
            key = stepSpec[1:]
            if key in autoHLT:
                stepSpec = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in stepSpec:
            #case where HLT:something:something was provided
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the options dict as keyword arguments for loadHltConfiguration
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
            if stepSpec == 'run,fromSource':
                # take the run number from the configured source, if it exposes one
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception(f'Cannot replace menu to load {stepSpec}')
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the corresponding frozen HLT configuration
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            # rename the process both in the generated cfg text and in memory
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        if self.process.schedule == None:
            raise Exception('the HLT step did not attach a valid schedule to the process')

        self.scheduleIndexOfFirstHLTPath = len(self.schedule)
        # HLT paths come from the menu dump; keep them out of the generated cfg
        [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

        # this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1649 
1650  def prepare_RAW2RECO(self, stepSpec = None):
1651  if ','in stepSpec:
1652  seqReco,seqDigi=stepSpec.spli(',')
1653  else:
1654  print(f"RAW2RECO requires two specifications {stepSpec} insufficient")
1655 
1656  self.prepare_RAW2DIGI(seqDigi)
1657  self.prepare_RECO(seqReco)
1658  return
1659 
1660  def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1661  _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1662  self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1663  return
1664 
1665  def prepare_PATFILTER(self, stepSpec = None):
1666  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1667  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1668  for filt in allMetFilterPaths:
1669  self.schedule.append(getattr(self.process,'Flag_'+filt))
1670 
1671  def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1672  ''' Enrich the schedule with L1 HW validation '''
1673  self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1674  print('\n\n\n DEPRECATED this has no action \n\n\n')
1675  return
1676 
1677  def prepare_L1Reco(self, stepSpec = "L1Reco"):
1678  ''' Enrich the schedule with L1 reconstruction '''
1679  _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1680  self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1681  return
1682 
1683  def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1684  ''' Enrich the schedule with L1 reconstruction '''
1685  _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1686  self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1687  return
1688 
1689  def prepare_FILTER(self, stepSpec = None):
1690  ''' Enrich the schedule with a user defined filter sequence '''
1691 
1692  filterConfig,filterSeq = stepSpec.split('.')
1693  filterConfig=self.load(filterConfig)
1694 
1695  class PrintAllModules(object):
1696  def __init__(self):
1697  self.inliner=''
1698  pass
1699  def enter(self,visitee):
1700  try:
1701  label=visitee.label()
1702 
1703  self.inliner=label+','+self.inliner
1704  except:
1705  pass
1706  def leave(self,v): pass
1707 
1708  expander=PrintAllModules()
1709  getattr(self.process,filterSeq).visit( expander )
1710  self._options.inlineObjets+=','+expander.inliner
1711  self._options.inlineObjets+=','+filterSeq
1712 
1713 
1714  self.scheduleSequence(filterSeq,'filtering_step')
1715  self.nextScheduleIsConditional=True
1716 
1717  self.productionFilterSequence = filterSeq
1718 
1719  return
1720 
1721  def prepare_RECO(self, stepSpec = "reconstruction"):
1722  ''' Enrich the schedule with reconstruction '''
1723  _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1724  self.scheduleSequence(_recoSeq,'reconstruction_step')
1725  return
1726 
1727  def prepare_RECOSIM(self, stepSpec = "recosim"):
1728  ''' Enrich the schedule with reconstruction '''
1729  _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1730  self.scheduleSequence(_recosimSeq,'recosim_step')
1731  return
1732 
1733  def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
1734  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1735  if not self._options.fast:
1736  print("ERROR: this step is only implemented for FastSim")
1737  sys.exit()
1738  _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1739  self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
1740  return
1741 
1742  def prepare_PAT(self, stepSpec = "miniAOD"):
1743  ''' Enrich the schedule with PAT '''
1744  self.prepare_PATFILTER(self)
1745  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1746  self.labelsToAssociate.append('patTask')
1747  if self._options.isData:
1748  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1749  else:
1750  if self._options.fast:
1751  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1752  else:
1753  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1754 
1755  if self._options.hltProcess:
1756  if len(self._options.customise_commands) > 1:
1757  self._options.customise_commands = self._options.customise_commands + " \n"
1758  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1759  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1760  self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1761 
1762 # self.renameHLTprocessInSequence(sequence)
1763 
1764  return
1765 
1766  def prepare_PATGEN(self, stepSpec = "miniGEN"):
1767  ''' Enrich the schedule with PATGEN '''
1768  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
1769  self.labelsToAssociate.append('patGENTask')
1770  if self._options.isData:
1771  raise Exception("PATGEN step can only run on MC")
1772  return
1773 
1774  def prepare_NANO(self, stepSpec = '' ):
1775  print(f"in prepare_nano {stepSpec}")
1776  ''' Enrich the schedule with NANO '''
1777  _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1778  self.scheduleSequence(_nanoSeq,'nanoAOD_step')
1779  custom = "nanoAOD_customizeCommon"
1780  self._options.customisation_file.insert(0,'.'.join([_nanoCff,custom]))
1781  if self._options.hltProcess:
1782  if len(self._options.customise_commands) > 1:
1783  self._options.customise_commands = self._options.customise_commands + " \n"
1784  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1785 
1786  def prepare_NANOGEN(self, stepSpec = "nanoAOD"):
1787  ''' Enrich the schedule with NANOGEN '''
1788  # TODO: Need to modify this based on the input file type
1789  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1790  _,_nanogenSeq,_nanogenCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANOGENDefaultCFF)
1791  self.scheduleSequence(_nanogenSeq,'nanoAOD_step')
1792  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1793  if self._options.runUnscheduled:
1794  self._options.customisation_file_unsch.insert(0, '.'.join([_nanogenCff, custom]))
1795  else:
1796  self._options.customisation_file.insert(0, '.'.join([_nanogenCff, custom]))
1797 
    def prepare_SKIM(self, stepSpec = "all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)

        stdHLTProcName = 'HLT'
        newHLTProcName = self._options.hltProcess
        # re-HLT jobs read trigger products under a non-standard process name
        customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
        if customiseForReHLT:
            print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))

        ## expand any '@'-aliases in the requested skim list
        from Configuration.Skimming.autoSkim import autoSkim
        skimlist = sequence.split('+')
        self.expandMapping(skimlist,autoSkim)

        #print("dictionary for skims:", skimConfig.__dict__)
        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig, skim)

            # blacklist AlCa paths so that they do not appear in the cfg
            if isinstance(skimstream, cms.Path):
                self.blacklist_paths.append(skimstream)
            # if enabled, apply "hltProcess" renaming to Sequences
            elif isinstance(skimstream, cms.Sequence):
                if customiseForReHLT:
                    self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)

            # only FilteredStream objects define actual output skims
            if not isinstance(skimstream, cms.FilteredStream):
                continue

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                #add a DQM eventcontent for this guy
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                    )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                # consume every occurrence of this skim from the requested list
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        # anything left over was requested but never matched: likely a typo
        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1851 
1852 
1853  def prepare_USER(self, stepSpec = None):
1854  ''' Enrich the schedule with a user defined sequence '''
1855  _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1856  self.scheduleSequence(_userSeq,'user_step')
1857  return
1858 
    def prepare_POSTRECO(self, stepSpec = None):
        """ Enrich the schedule with the postreco step """
        # NOTE(review): no cff load for postreco is visible here — confirm
        # 'postreco_generator' is defined by a configuration loaded upstream
        self.scheduleSequence('postreco_generator','postreco_step')
        return
1864 
1865 
1866  def prepare_VALIDATION(self, stepSpec = 'validation'):
1867  print(f"{stepSpec} in preparing validation")
1868  _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1869  from Validation.Configuration.autoValidation import autoValidation
1870  #in case VALIDATION:something:somethingelse -> something,somethingelse
1871  if sequence.find(',')!=-1:
1872  prevalSeqName=sequence.split(',')[0].split('+')
1873  valSeqName=sequence.split(',')[1].split('+')
1874  self.expandMapping(prevalSeqName,autoValidation,index=0)
1875  self.expandMapping(valSeqName,autoValidation,index=1)
1876  else:
1877  if '@' in sequence:
1878  prevalSeqName=sequence.split('+')
1879  valSeqName=sequence.split('+')
1880  self.expandMapping(prevalSeqName,autoValidation,index=0)
1881  self.expandMapping(valSeqName,autoValidation,index=1)
1882  else:
1883  postfix=''
1884  if sequence:
1885  postfix='_'+sequence
1886  prevalSeqName=['prevalidation'+postfix]
1887  valSeqName=['validation'+postfix]
1888  if not hasattr(self.process,valSeqName[0]):
1889  prevalSeqName=['']
1890  valSeqName=[sequence]
1891 
1892  def NFI(index):
1893 
1894  if index==0:
1895  return ''
1896  else:
1897  return '%s'%index
1898 
1899 
1900  #rename the HLT process in validation steps
1901  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1902  for s in valSeqName+prevalSeqName:
1903  if s:
1905  for (i,s) in enumerate(prevalSeqName):
1906  if s:
1907  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1908  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1909 
1910  for (i,s) in enumerate(valSeqName):
1911  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1912  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1913 
1914  #needed in case the miniAODValidation sequence is run starting from AODSIM
1915  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1916  return
1917 
1918  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1919  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1920  self._options.restoreRNDSeeds=True
1921 
1922  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1923  self.executeAndRemember("process.mix.playback = True")
1924  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1925  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1926  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1927 
1928  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1929  #will get in the schedule, smoothly
1930  for (i,s) in enumerate(valSeqName):
1931  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1932 
1933  return
1934 
1935 
        """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
        It will climb down within PSets, VPSets and VInputTags to find its target"""
        def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
            # value (typically a process name) to substitute in
            self._paramReplace = paramReplace
            # value to search for
            self._paramSearch = paramSearch
            # when True, print every replacement as it happens
            self._verbose = verbose
            # parameter names that must never be touched
            self._whitelist = whitelist
        def doIt(self, pset, base):
            """Recursively walk *pset* and replace matches of the searched value.

            *base* is the dotted path of *pset*, used only for verbose printouts.
            """
            if isinstance(pset, cms._Parameterizable):
                for name in pset.parameters_().keys():
                    # skip whitelisted parameters
                    if name in self._whitelist:
                        continue
                    # if I use pset.parameters_().items() I get copies of the parameter values
                    # so I can't modify the nested pset
                    value = getattr(pset, name)
                    valueType = type(value)
                    # recurse into nested PSets and VPSets
                    if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
                        self.doIt(value,base+"."+name)
                    elif valueType in [cms.VPSet, cms.untracked.VPSet]:
                        for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                    # plain strings are compared for full equality with the search value
                    elif valueType in [cms.string, cms.untracked.string]:
                        if value.value() == self._paramSearch:
                            if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
                            setattr(pset, name,self._paramReplace)
                    # for (V)InputTags only the processName component is compared/replaced
                    elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
                        for (i,n) in enumerate(value):
                            if not isinstance(n, cms.InputTag):
                                n=cms.InputTag(n)
                            if n.processName == self._paramSearch:
                                # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                                if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
                                setattr(n,"processName",self._paramReplace)
                                value[i]=n
                    elif valueType in [cms.vstring, cms.untracked.vstring]:
                        for (i,n) in enumerate(value):
                            if n==self._paramSearch:
                                getattr(pset,name)[i]=self._paramReplace
                    elif valueType in [cms.InputTag, cms.untracked.InputTag]:
                        if value.processName == self._paramSearch:
                            if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
                            setattr(getattr(pset, name),"processName",self._paramReplace)
1980 
1981  def enter(self,visitee):
1982  label = ''
1983  try:
1984  label = visitee.label()
1985  except AttributeError:
1986  label = '<Module not in a Process>'
1987  except:
1988  label = 'other execption'
1989  self.doIt(visitee, label)
1990 
        def leave(self,visitee):
            # nothing to do when exiting a container
            pass
1993 
    #visit a sequence to replace all input tags
1995  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1996  print("Replacing all InputTag %s => %s"%(oldT,newT))
1997  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1998  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1999  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
2000  if not loadMe in self.additionalCommands:
2001  self.additionalCommands.append(loadMe)
2002  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2003 
2004  #change the process name used to address HLT results in any sequence
2005  def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2006  if proc == None:
2007  proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2008  if proc == HLTprocess:
2009  return
2010  # look up all module in sequence
2011  if verbosityLevel > 0:
2012  print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2013  verboseVisit = (verbosityLevel > 1)
2014  getattr(self.process,sequence).visit(
2015  ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2016  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2017  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2019  'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2020  % (sequence, HLTprocess, proc, verboseVisit))
2021 
2022  def expandMapping(self,seqList,mapping,index=None):
2023  maxLevel=30
2024  level=0
2025  while '@' in repr(seqList) and level<maxLevel:
2026  level+=1
2027  for specifiedCommand in seqList:
2028  if specifiedCommand.startswith('@'):
2029  location=specifiedCommand[1:]
2030  if not location in mapping:
2031  raise Exception("Impossible to map "+location+" from "+repr(mapping))
2032  mappedTo=mapping[location]
2033  if index!=None:
2034  mappedTo=mappedTo[index]
2035  seqList.remove(specifiedCommand)
2036  seqList.extend(mappedTo.split('+'))
2037  break;
2038  if level==maxLevel:
2039  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2040 
    def prepare_DQM(self, stepSpec = 'DQMOffline'):
        """Enrich the schedule with offline-DQM EndPaths and their post-PAT counterparts."""
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
        sequenceList=_dqmSeq.split('+')
        postSequenceList=_dqmSeq.split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # index 0 of an autoDQM entry is the DQM sequence, index 1 its post-PAT sequence
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,_sequence) in enumerate(sequenceList):
            # every sequence after the first gets a numbered path name
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(_sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,_sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2083 
    def prepare_HARVESTING(self, stepSpec = None):
        """ Enrich the process with harvesting step """
        self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
        # NOTE(review): the load of self.DQMSaverCFF is not visible here —
        # confirm it happens before 'DQMSaver' is scheduled below

        harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)

        # decide which HARVESTING paths to use
        harvestingList = sequence.split("+")
        from DQMOffline.Configuration.autoDQM import autoDQM
        from Validation.Configuration.autoValidation import autoValidation
        import copy
        # aliases may come from either the DQM or the validation auto-mappings
        combined_mapping = copy.deepcopy( autoDQM )
        combined_mapping.update( autoValidation )
        self.expandMapping(harvestingList,combined_mapping,index=-1)

        if len(set(harvestingList))!=len(harvestingList):
            harvestingList=list(set(harvestingList))
            print("Duplicate entries for HARVESTING, using",harvestingList)

        for name in harvestingList:
            if not name in harvestingConfig.__dict__:
                print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
                # trigger hard error, like for other sequence types
                getattr(self.process, name)
                continue
            harvestingstream = getattr(harvestingConfig,name)
            # Paths go into the schedule directly (and are kept out of the cfg dump);
            # Sequences are wrapped into a '<name>_step' Path first
            if isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                self.blacklist_paths.append(harvestingstream)
            if isinstance(harvestingstream,cms.Sequence):
                setattr(self.process,name+"_step",cms.Path(harvestingstream))
                self.schedule.append(getattr(self.process,name+"_step"))

        # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
        # if self._options.hltProcess or ('HLT' in self.stepMap):
        #     pass

        self.scheduleSequence('DQMSaver','dqmsave_step')
        return
2124 
    def prepare_ALCAHARVEST(self, stepSpec = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        sequence=stepSpec.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # each PCL path has matching dbOutput/metadata payloads:
                # VPSets are extended, single PSets appended
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over was requested but never matched a configured path
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2157 
2158 
2159 
2160  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2161  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2162  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2163  return
2164 
2165  def finalizeFastSimHLT(self):
2166  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2167  self.schedule.append(self.process.reconstruction)
2168 
2169 
2170  def build_production_info(self, evt_type, evtnumber):
2171  """ Add useful info for the production. """
2172  self.process.configurationMetadata=cms.untracked.PSet\
2173  (version=cms.untracked.string("$Revision: 1.19 $"),
2174  name=cms.untracked.string("Applications"),
2175  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2176  )
2177 
2178  self.addedObjects.append(("Production Info","configurationMetadata"))
2179 
2180 
    def create_process(self):
        """Create self.process (applying eras and process modifiers) and start
        accumulating the python configuration dump in self.pythonCfgCode."""
        self.pythonCfgCode = "# Auto generated configuration file\n"
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        # now set up the modifiers
        modifiers=[]
        modifierStrings=[]
        modifierImports=[]

        if hasattr(self._options,"era") and self._options.era :
            # Multiple eras can be specified in a comma seperated list
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(",") :
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras,requestedEra))


        if hasattr(self._options,"procModifiers") and self._options.procModifiers:
            import importlib
            # each --procModifiers argument may itself be a comma-separated list
            thingsImported=[]
            for c in self._options.procModifiers:
                thingsImported.extend(c.split(","))
            for pm in thingsImported:
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))

        self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop


        if len(modifierStrings)>0:
            self.pythonCfgCode+= ','+','.join(modifierStrings)
        self.pythonCfgCode+=')\n\n'

        #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
        #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
        if self.process == None:
            if len(modifiers)>0:
                self.process = cms.Process(self._options.name,*modifiers)
            else:
                self.process = cms.Process(self._options.name)
2229 
def prepare(self, doChecking = False):
    """ Prepare the configuration string and add missing pieces.

    Assembles the final python configuration text in ``self.pythonCfgCode``
    (standard-sequence loads, production info, output modules, additional
    commands, paths and schedule, customisations) while applying the same
    operations to the live ``self.process``, and optionally writes the ``.io``
    summary file describing inputs/outputs of the job.
    """
    self.loadAndRemember(self.EVTCONTDefaultCFF)  #load the event contents regardless
    self.addMaxEvents()
    if self.with_input:
        self.addSource()
    self.addStandardSequences()

    self.completeInputCommand()
    self.addConditions()

    # no regular output module for (alca-)harvesting-type jobs
    outputModuleCfgCode=""
    if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
        outputModuleCfgCode=self.addOutput()

    self.addCommon()

    self.pythonCfgCode += "# import of standard configurations\n"
    for module in self.imports:
        self.pythonCfgCode += ("process.load('"+module+"')\n")

    # production info
    if not hasattr(self.process,"configurationMetadata"):
        self.build_production_info(self._options.evt_type, self._options.number)
    else:
        #the PSet was added via a load
        self.addedObjects.append(("Production Info","configurationMetadata"))

    self.pythonCfgCode +="\n"
    for comment,object in self.addedObjects:
        if comment!="":
            self.pythonCfgCode += "\n# "+comment+"\n"
        self.pythonCfgCode += dumpPython(self.process,object)

    # dump the output definition
    self.pythonCfgCode += "\n# Output definition\n"
    self.pythonCfgCode += outputModuleCfgCode

    # dump all additional outputs (e.g. alca or skim streams)
    self.pythonCfgCode += "\n# Additional output definition\n"
    #I do not understand why the keys are not normally ordered.
    nl=sorted(self.additionalOutputs.keys())
    for name in nl:
        output = self.additionalOutputs[name]
        self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
        tmpOut = cms.EndPath(output)
        setattr(self.process,name+'OutPath',tmpOut)
        self.schedule.append(tmpOut)

    # dump all additional commands
    self.pythonCfgCode += "\n# Other statements\n"
    for command in self.additionalCommands:
        self.pythonCfgCode += command + "\n"

    #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
    for object in self._options.inlineObjets.split(','):
        if not object:
            continue
        if not hasattr(self.process,object):
            print('cannot inline -'+object+'- : not known')
        else:
            self.pythonCfgCode +='\n'
            self.pythonCfgCode +=dumpPython(self.process,object)

    if self._options.pileup=='HiMixEmbGEN':
        self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"

    # dump all paths and endpaths that are not blacklisted
    self.pythonCfgCode += "\n# Path and EndPath definitions\n"
    for path in self.process.paths:
        if getattr(self.process,path) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process,path)

    for endpath in self.process.endpaths:
        if getattr(self.process,endpath) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process,endpath)

    # dump the schedule
    self.pythonCfgCode += "\n# Schedule definition\n"

    # handling of the schedule
    pathNames = ['process.'+p.label_() for p in self.schedule]
    if self.process.schedule is None:
        # build the schedule from scratch
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            self.process.schedule.append(item)
        result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
    else:
        # a schedule was imported (HLT cff): splice our paths around the HLT block
        if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
            raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')

        for index, item in enumerate(self.schedule):
            if index < self.scheduleIndexOfFirstHLTPath:
                self.process.schedule.insert(index, item)
            else:
                self.process.schedule.append(item)

        result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
        for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
            result += 'process.schedule.insert('+str(index)+', '+item+')\n'
        if self.scheduleIndexOfFirstHLTPath < len(pathNames):
            result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'

    self.pythonCfgCode += result

    for labelToAssociate in self.labelsToAssociate:
        self.process.schedule.associate(getattr(self.process, labelToAssociate))
        self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

    from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
    associatePatAlgosToolsTask(self.process)
    self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
    self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"

    # only emit the multithreading setup when the user overrode a default
    overrideThreads = (self._options.nThreads != "1")
    overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
    overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)

    if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
        self.pythonCfgCode +="\n"
        self.pythonCfgCode +="#Setup FWK for multithreaded\n"
        if overrideThreads:
            self.pythonCfgCode +="process.options.numberOfThreads = "+self._options.nThreads+"\n"
            self.pythonCfgCode +="process.options.numberOfStreams = "+self._options.nStreams+"\n"
            self.process.options.numberOfThreads = int(self._options.nThreads)
            self.process.options.numberOfStreams = int(self._options.nStreams)
        if overrideConcurrentLumis:
            self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = "+self._options.nConcurrentLumis+"\n"
            self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
        if overrideConcurrentIOVs:
            self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = "+self._options.nConcurrentIOVs+"\n"
            self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)

    if self._options.accelerators is not None:
        accelerators = self._options.accelerators.split(',')
        self.pythonCfgCode += "\n"
        self.pythonCfgCode += "# Enable only these accelerator backends\n"
        self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
        self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
        self.process.load('Configuration.StandardSequences.Accelerators_cff')
        self.process.options.accelerators = accelerators

    #repacked version
    if self._options.isRepacked:
        self.pythonCfgCode +="\n"
        self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
        self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
        MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")

    # special treatment in case of production filter sequence 2/2
    if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
        self.pythonCfgCode +='# filter all path with the production filter sequence\n'
        self.pythonCfgCode +='for path in process.paths:\n'
        if len(self.conditionalPaths):
            self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
        if len(self.excludedPaths):
            self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
        self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
        pfs = getattr(self.process,self.productionFilterSequence)
        for path in self.process.paths:
            if not path in self.conditionalPaths: continue
            if path in self.excludedPaths: continue
            getattr(self.process,path).insert(0, pfs)

    # dump customise fragment
    self.pythonCfgCode += self.addCustomise()

    if self._options.runUnscheduled:
        print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
    # Keep the "unscheduled customise functions" separate for now,
    # there are customize functions given by users (in our unit
    # tests) that need to be run before the "unscheduled customise
    # functions"
    self.pythonCfgCode += self.addCustomise(1)

    self.pythonCfgCode += self.addCustomiseCmdLine()

    if hasattr(self.process,"logErrorHarvester"):
        #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
        self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
        self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
        self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
        from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
        self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)

    # Temporary hack to put the early delete customization after
    # everything else
    #
    # FIXME: remove when no longer needed
    self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
    self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
    self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
    self.pythonCfgCode += "# End adding early deletion\n"
    from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
    self.process = customiseEarlyDelete(self.process)

    imports = cms.specialImportRegistry.getSpecialImports()
    if len(imports) > 0:
        #need to inject this at the top
        index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
        #now find the end of line
        index = self.pythonCfgCode.find("\n",index)
        self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]

    # make the .io file
    if self._options.io:
        # bug fix: this used to write to the non-existent attribute self._option,
        # so the '.io' extension was silently never appended
        if not self._options.io.endswith('.io'): self._options.io+='.io'
        # use a context manager so the file handle is always closed
        with open(self._options.io,'w') as io:
            ioJson={}
            if hasattr(self.process.source,"fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary']=self.process.source.fileNames.value()
            if hasattr(self.process.source,"secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary']=self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup']=self._options.pileup_input[4:]
            for (o,om) in self.process.outputModules_().items():
                ioJson[o]=om.fileName.value()
            ioJson['GT']=self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter']=self.productionFilterSequence
            import json
            io.write(json.dumps(ioJson))
    return
2461 
2462 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:207
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule