CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 from __future__ import print_function
4 __version__ = "$Revision: 1.19 $"
5 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
6 
7 import FWCore.ParameterSet.Config as cms
8 from FWCore.ParameterSet.Modules import _Module
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import hashlib
14 import sys
15 import re
16 import collections
17 from subprocess import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes as DictTypes
19 from FWCore.ParameterSet.OrderedSet import OrderedSet
class Options:
    """Empty attribute container for cmsDriver option values.

    Instances start with no attributes; callers attach them one by one
    (see the module-level ``defaultOptions`` object defined below).
    """
    pass
22 
# the canonical defaults
defaultOptions = Options()
# --- sample type / processing steps ---
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
# --- pile-up mixing ---
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# --- detector description / conditions ---
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# --- event counts and bookkeeping ---
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# --- input specification ---
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# --- user customisation hooks ---
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# --- input/output locations and format ---
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# --- event content / data tier of the outputs ---
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False
# import kept here (historical placement) so the VtxSmeared keys are
# available as module-level names alongside the defaults
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
# --- profiling / misc run options ---
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# --- run-dependent MC configuration ---
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# --- framework concurrency settings (kept as strings; parsed later) ---
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
95 
# some helper routines
def dumpPython(process, name):
    """Return a python snippet recreating ``process.<name>``.

    Paths, EndPaths and Sequences are rendered without a trailing newline
    (callers insert their own separators); every other object type gets a
    trailing newline.  The original had byte-identical ``elif``/``else``
    branches for modules/ESProducers vs. everything else; they are
    collapsed into a single return here.
    """
    theObject = getattr(process, name)
    if isinstance(theObject, (cms.Path, cms.EndPath, cms.Sequence)):
        return "process." + name + " = " + theObject.dumpPython()
    # _Module, ESProducer and any other type share the same rendering
    return "process." + name + " = " + theObject.dumpPython() + "\n"
def filesFromList(fileName, s=None):
    """Collect .root input file names from the text file *fileName*.

    Each line may name one ``.root`` file (primary input) or two (primary
    plus its parent/secondary file).  Duplicates are removed while keeping
    first-seen order.  When a source object *s* is given, the names are
    attached to (or extended on) its ``fileNames`` / ``secondaryFileNames``
    untracked vstring parameters.

    Returns the tuple ``(prim, sec)``.
    Raises Exception when no primary file was found.
    """
    prim = []
    sec = []
    # 'with' guarantees the handle is closed (the original leaked it)
    with open(fileName, 'r') as listFile:
        for line in listFile:
            if line.count(".root") >= 2:
                #two files solution...
                entries = line.replace("\n", "").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif line.find(".root") != -1:
                prim.append(line.replace("\n", ""))
    # remove any duplicates but keep the order
    file_seen = set()
    prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
    file_seen = set()
    sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
    if s:
        # imported lazily: cms is only needed when attaching to a source
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s, "fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec) != 0:
            if not hasattr(s, "secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ", prim)
    if len(prim) == 0:
        raise Exception("There are no files in input from the file list")
    if len(sec) != 0:
        print("found parent files:", sec)
    return (prim, sec)
140 
def filesFromDASQuery(query,option="",s=None):
    """Run *query* through ``dasgoclient`` and collect .root file names.

    Retries the client up to 3 times (sleeping 100 s between attempts) on a
    non-zero exit code.  Lines with two .root names are split into primary
    plus secondary (parent) files.  Results are de-duplicated and sorted,
    and, when a source object *s* is given, attached to its ``fileNames`` /
    ``secondaryFileNames`` parameters.  Returns ``(prim, sec)``.

    NOTE(review): the command is built by string interpolation and run with
    shell=True — a shell-injection hazard if *query*/*option* ever come
    from untrusted input.  Also, after 3 failures the code still parses the
    output of the last attempt instead of raising.
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    # retry loop: eC is the child's exit status from os.waitpid
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        # read all output before waiting to avoid a pipe deadlock
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    # pipe is always bound: the while loop runs at least once
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates (unlike filesFromList, order is sorted here)
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
188 
def anyOf(listOfKeys, dict, opt=None):
    """Pop and return the value of the first key of *listOfKeys* found in *dict*.

    The matching key is removed from *dict*; callers inspect the leftover
    keys to detect unused --output options.  If none of the keys is present,
    *opt* is returned when given, otherwise an Exception is raised.

    The second parameter intentionally keeps its historical name ``dict``
    (shadowing the builtin) so existing keyword calls keep working.
    """
    for k in listOfKeys:
        if k in dict:
            # pop returns the value and removes the key in one step
            return dict.pop(k)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
199 
201  """The main building routines """
202 
    def __init__(self, options, process = None, with_output = False, with_input = False ):
        """options taken from old cmsDriver and optparse

        Validates the options, parses ``options.step`` into ``self.stepMap``
        and ``self.stepKeys``, and prepares the process/schedule bookkeeping
        used by the ``prepare_*`` methods.
        """

        options.outfile_name = options.dirout+options.fileout

        self._options = options

        # --data and --mc are mutually exclusive
        if self._options.isData and options.isMC:
            raise Exception("ERROR: You may specify only --data or --mc, not both")
        #if not self._options.conditions:
        #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

        # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
        if 'ENDJOB' in self._options.step:
            if  (hasattr(self._options,"outputDefinition") and \
                self._options.outputDefinition != '' and \
                any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
                (hasattr(self._options,"datatier") and \
                self._options.datatier and \
                'DQMIO' in self._options.datatier):
                print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
                self._options.step=self._options.step.replace(',ENDJOB','')



        # what steps are provided by this class?
        # every prepare_<NAME> method advertises step <NAME>
        stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
        self.stepMap={}
        self.stepKeys=[]
        # a step spec is NAME, NAME:sequence or NAME:process:sequence
        for step in self._options.step.split(","):
            if step=='': continue
            stepParts = step.split(":")
            stepName = stepParts[0]
            # 're'-prefixed steps (e.g. reRECO) are accepted without a prepare_ method
            if stepName not in stepList and not stepName.startswith('re'):
                raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
            if len(stepParts)==1:
                self.stepMap[stepName]=""
            elif len(stepParts)==2:
                self.stepMap[stepName]=stepParts[1].split('+')
            elif len(stepParts)==3:
                self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
            else:
                raise ValueError(f"Step definition {step} invalid")
            self.stepKeys.append(stepName)

        #print(f"map of steps is: {self.stepMap}")

        self.with_output = with_output
        self.process=process

        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        self.imports = []
        self.create_process()
        self.define_Configs()
        self.schedule = list()

        # we are doing three things here:
        # creating a process to catch errors
        # building the code to re-create the process

        # TODO: maybe a list of to be dumped objects would help as well
        self.blacklist_paths = []
        self.addedObjects = []
        # NOTE(review): the listing this was taken from omits a few lines
        # around here (e.g. the initialization of self.additionalCommands,
        # which executeAndRemember() appends to) — confirm against the
        # full file.
278  def profileOptions(self):
279  """
280  addIgProfService
281  Function to add the igprof profile service so that you can dump in the middle
282  of the run.
283  """
284  profileOpts = self._options.profile.split(':')
285  profilerStart = 1
286  profilerInterval = 100
287  profilerFormat = None
288  profilerJobFormat = None
289 
290  if len(profileOpts):
291  #type, given as first argument is unused here
292  profileOpts.pop(0)
293  if len(profileOpts):
294  startEvent = profileOpts.pop(0)
295  if not startEvent.isdigit():
296  raise Exception("%s is not a number" % startEvent)
297  profilerStart = int(startEvent)
298  if len(profileOpts):
299  eventInterval = profileOpts.pop(0)
300  if not eventInterval.isdigit():
301  raise Exception("%s is not a number" % eventInterval)
302  profilerInterval = int(eventInterval)
303  if len(profileOpts):
304  profilerFormat = profileOpts.pop(0)
305 
306 
307  if not profilerFormat:
308  profilerFormat = "%s___%s___%%I.gz" % (
309  self._options.evt_type.replace("_cfi", ""),
310  hashlib.md5(
311  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
312  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
313  ).hexdigest()
314  )
315  if not profilerJobFormat and profilerFormat.endswith(".gz"):
316  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
317  elif not profilerJobFormat:
318  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
319 
320  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
321 
323  """
324  addJeProfService
325  Function to add the jemalloc heap profile service so that you can dump in the middle
326  of the run.
327  """
328  profileOpts = self._options.profile.split(':')
329  profilerStart = 1
330  profilerInterval = 100
331  profilerFormat = None
332  profilerJobFormat = None
333 
334  if len(profileOpts):
335  #type, given as first argument is unused here
336  profileOpts.pop(0)
337  if len(profileOpts):
338  startEvent = profileOpts.pop(0)
339  if not startEvent.isdigit():
340  raise Exception("%s is not a number" % startEvent)
341  profilerStart = int(startEvent)
342  if len(profileOpts):
343  eventInterval = profileOpts.pop(0)
344  if not eventInterval.isdigit():
345  raise Exception("%s is not a number" % eventInterval)
346  profilerInterval = int(eventInterval)
347  if len(profileOpts):
348  profilerFormat = profileOpts.pop(0)
349 
350 
351  if not profilerFormat:
352  profilerFormat = "%s___%s___%%I.heap" % (
353  self._options.evt_type.replace("_cfi", ""),
354  hashlib.md5(
355  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
356  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
357  ).hexdigest()
358  )
359  if not profilerJobFormat and profilerFormat.endswith(".heap"):
360  profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
361  elif not profilerJobFormat:
362  profilerJobFormat = profilerFormat + "_EndOfJob.heap"
363 
364  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
365 
366  def load(self,includeFile):
367  includeFile = includeFile.replace('/','.')
368  self.process.load(includeFile)
369  return sys.modules[includeFile]
370 
371  def loadAndRemember(self, includeFile):
372  """helper routine to load am memorize imports"""
373  # we could make the imports a on-the-fly data method of the process instance itself
374  # not sure if the latter is a good idea
375  includeFile = includeFile.replace('/','.')
376  self.imports.append(includeFile)
377  self.process.load(includeFile)
378  return sys.modules[includeFile]
379 
    def executeAndRemember(self, command):
        """Execute *command* against self.process now and record it.

        The recorded text is replayed verbatim when the configuration is
        dumped to python, so the dumped file reproduces the same change.
        Pure comment lines (starting with '#') are recorded but not executed.
        """
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
        # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
            import re
            # word-boundary-safe rewrite of 'process' so e.g. 'subprocess' is untouched
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
            #exec(command.replace("process.","self.process."))
388 
    def addCommon(self):
        """Configure process-wide options and optional services.

        Sets harvesting-friendly framework options, and attaches the
        AdaptorConfig, IgProfService and JeProfService services when the
        corresponding cmsDriver options are active.
        """
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            # harvesting jobs must stop on missing products and fully merge runs
            self.process.options.Rethrow = ['ProductNotFound']
            self.process.options.fileMode = 'FULLMERGE'

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                stats = cms.untracked.bool(True),
                enable = cms.untracked.bool(True),
                cacheHint = cms.untracked.string("lazy-download"),
                readHint = cms.untracked.string("read-ahead-buffered")
                )
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
        #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))

        if self._options.profile:
            # igprof output is piped through gzip to the formatted file names
            (start, interval, eventFormat, jobFormat)=self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                reportFirstEvent = cms.untracked.int32(start),
                reportEventInterval = cms.untracked.int32(interval),
                reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

        if self._options.heap_profile:
            # jemalloc heap dumps are written directly (no compression pipe)
            (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
            self.process.JeProfService = cms.Service("JeProfService",
                reportFirstEvent = cms.untracked.int32(start),
                reportEventInterval = cms.untracked.int32(interval),
                reportToFileAtPostEvent = cms.untracked.string("%s"%(eventFormat)),
                reportToFileAtPostEndJob = cms.untracked.string("%s"%(jobFormat)))
            self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
425 
426  def addMaxEvents(self):
427  """Here we decide how many evts will be processed"""
428  self.process.maxEvents.input = int(self._options.number)
429  if self._options.number_out:
430  self.process.maxEvents.output = int(self._options.number_out)
431  self.addedObjects.append(("","maxEvents"))
432 
    def addSource(self):
        """Here the source is built. Priority: file, generator

        Builds ``self.process.source`` from the input options (file list,
        DAS query, LHE article, ...), applies input commands and lumi
        masks, and configures run-dependent MC run/weight assignment.
        """
        self.addedObjects.append(("Input source","source"))

        # local helper (note: takes self explicitly, called as filesFromOption(self))
        # that fills the already-created self.process.source from --filein/--secondfilein
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print("entry",entry)
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                    fileNames = cms.untracked.vstring(),
                    secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print('LHE input from article ',article)
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    #check first if list of LHE files is loaded (not empty)
                    # 'line' here is the last line read above; a short last
                    # line is taken as an empty listing
                    if len(line)<2:
                        print('Issue to load LHE files, please check and try again.')
                        sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print('Issue with empty filename, but can pass line check')
                        sys.exit(-1)
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                    fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # GEN jobs reading non-LHE input must drop any stale LHE products
        if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                # scenario entries are either a module name (string) or the distribution itself
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            # NOTE(review): the listing this was taken from omits a line here
            # (presumably the direct ThrowAndSetRandomRun call on
            # self.process.source) — confirm against the full file.
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        # modify source in case of run-dependent MC (Run-3 method)
        # NOTE(review): the listing omits a line here (presumably
        # self.runsAndWeightsInt=None, which the 'if' below relies on) —
        # confirm against the full file.
        if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
            if self._options.runsAndWeightsForMCIntegerWeights:
                self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                    self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
                else:
                    self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

        if self.runsAndWeightsInt:
            if not self._options.relval:
                raise Exception("--relval option required when using --runsAndWeightsInt")
            if 'DATAMIX' in self._options.step:
                from SimGeneral.Configuration.LumiToRun import lumi_to_run
                total_events, events_per_job = self._options.relval.split(',')
                lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
                self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

        return
584 
    def addOutput(self):
        """ Add output module to the process

        Two models are supported: the --output option (a python list of
        dicts, handled first and returning early) and the classic paired
        --eventcontent/--datatier lists.  Returns the python text of the
        created output modules for the dump.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print("--datatier & --eventcontent options ignored")

            #new output convention with a list of dict
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename

                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role
                # pick the first auto-generated label not already on the process
                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # anyOf popped every recognized key; anything left is a typo
                if len(outDefDict):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                    theEventContent.clone(),
                    fileName = cms.untracked.string(theFileName),
                    dataset = cms.untracked.PSet(
                        dataTier = cms.untracked.string(theTier),
                        filterName = cms.untracked.string(theFilterName))
                    )
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if addAlCaSelects:
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    # patch dumpPython so the dump references the named event
                    # content instead of inlining the full command list
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


            # end of the --output model: do not fall through to the classic one
            return result

        # classic model: paired --eventcontent / --datatier lists
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            # NANOEDM streams reuse the plain NANO event content definitions
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                theEventContent,
                fileName = cms.untracked.string(theFileName),
                dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                    filterName = cms.untracked.string(theFilterName)
                    )
                )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                # NOTE(review): the listing this was taken from omits a line
                # here (presumably the call applying miniAOD_customizeOutput
                # to 'output'; the import is otherwise unused) — confirm
                # against the full file.

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                # patch dumpPython so the dump references the named event
                # content instead of inlining the full command list
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
765 
    def addStandardSequences(self):
        """
        Add selected standard sequences to the process:
        pile-up mixing, geometry, magnetic field, and then dispatch every
        requested step to its prepare_<STEP> method.  Optionally arranges for
        the random-engine state to be restored from the input file.
        """
        # load the pile up file
        if self._options.pileup:
            # the part before the first comma names the scenario; the rest (if
            # any) is a python dict literal with parameter overrides
            pileupSpec=self._options.pileup.split(',')[0]

            # Does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # Put mixing parameters in a dictionary
            if '.' in pileupSpec:
                # a dotted spec is taken verbatim as a cff file name
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                # explicitly local file
                mixingDict={'file':pileupSpec[5:]}
            else:
                import copy
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                # NOTE: eval() of a command-line-provided dict literal — fine for
                # this configuration tool, but never feed it untrusted input
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # Load the pu cfg file corresponding to the requested pu scenario
            if 'file:' in pileupSpec:
                #the file is local
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            # 'file' is consumed above; the remaining keys are mixing parameters
            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                # defineMixing pops the keys it understands; leftovers are user errors
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())


        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    # NOTE(review): upstream versions load self.SimGeometryCFF here
                    # before applying the DB label overrides — confirm whether that
                    # line was lost from this copy of the file
                    if self.geometryDBLabel:
                        self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
                        self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))

        except ImportError:
            print("Geometry option",self._options.geometry,"unknown.")
            raise

        # the magnetic field cff name was resolved in define_Configs
        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        # dispatch every requested step to its prepare_<STEP> method
        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print("Step:", stepName,"Spec:",stepSpec)
            if stepName.startswith('re'):
                # 're'-prefixed steps redo a step on an existing input file
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            #it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                # also keep the stored random-engine state from the input file
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
861 
862 
863  def completeInputCommand(self):
864  if self._options.inputEventContent:
865  import copy
866  def dropSecondDropStar(iec):
867  #drop occurence of 'drop *' in the list
868  count=0
869  for item in iec:
870  if item=='drop *':
871  if count!=0:
872  iec.remove(item)
873  count+=1
874 
875 
876  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
877  for evct in self._options.inputEventContent.split(','):
878  if evct=='': continue
879  theEventContent = getattr(self.process, evct+"EventContent")
880  if hasattr(theEventContent,'outputCommands'):
881  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
882  if hasattr(theEventContent,'inputCommands'):
883  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
884 
885  dropSecondDropStar(self.process.source.inputCommands)
886 
887  if not self._options.dropDescendant:
888  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
889 
890 
891  return
892 
893  def addConditions(self):
894  """Add conditions to the process"""
895  if not self._options.conditions: return
896 
897  if 'FrontierConditions_GlobalTag' in self._options.conditions:
898  print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
899  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
900 
902  from Configuration.AlCa.GlobalTag import GlobalTag
903  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
904  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
905  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
906 
907 
908  def addCustomise(self,unsch=0):
909  """Include the customise code """
910 
911  custOpt=[]
912  if unsch==0:
913  for c in self._options.customisation_file:
914  custOpt.extend(c.split(","))
915  else:
916  for c in self._options.customisation_file_unsch:
917  custOpt.extend(c.split(","))
918 
919  custMap=DictTypes.SortedKeysDict()
920  for opt in custOpt:
921  if opt=='': continue
922  if opt.count('.')>1:
923  raise Exception("more than . in the specification:"+opt)
924  fileName=opt.split('.')[0]
925  if opt.count('.')==0: rest='customise'
926  else:
927  rest=opt.split('.')[1]
928  if rest=='py': rest='customise' #catch the case of --customise file.py
929 
930  if fileName in custMap:
931  custMap[fileName].extend(rest.split('+'))
932  else:
933  custMap[fileName]=rest.split('+')
934 
935  if len(custMap)==0:
936  final_snippet='\n'
937  else:
938  final_snippet='\n# customisation of the process.\n'
939 
940  allFcn=[]
941  for opt in custMap:
942  allFcn.extend(custMap[opt])
943  for fcn in allFcn:
944  if allFcn.count(fcn)!=1:
945  raise Exception("cannot specify twice "+fcn+" as a customisation method")
946 
947  for f in custMap:
948  # let python search for that package and do syntax checking at the same time
949  packageName = f.replace(".py","").replace("/",".")
950  __import__(packageName)
951  package = sys.modules[packageName]
952 
953  # now ask the package for its definition and pick .py instead of .pyc
954  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
955 
956  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
957  if self._options.inline_custom:
958  for line in file(customiseFile,'r'):
959  if "import FWCore.ParameterSet.Config" in line:
960  continue
961  final_snippet += line
962  else:
963  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
964  for fcn in custMap[f]:
965  print("customising the process with",fcn,"from",f)
966  if not hasattr(package,fcn):
967  #bound to fail at run time
968  raise Exception("config "+f+" has no function "+fcn)
969  #execute the command
970  self.process=getattr(package,fcn)(self.process)
971  #and print it in the configuration
972  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
973  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
974 
975  if len(custMap)!=0:
976  final_snippet += '\n# End of customisation functions\n'
977 
978 
979  return final_snippet
980 
981  def addCustomiseCmdLine(self):
982  final_snippet='\n# Customisation from command line\n'
983  if self._options.customise_commands:
984  import string
985  for com in self._options.customise_commands.split('\\n'):
986  com=com.lstrip()
987  self.executeAndRemember(com)
988  final_snippet +='\n'+com
989 
990  return final_snippet
991 
992  #----------------------------------------------------------------------------
993  # here the methods to define the python includes for each step or
994  # conditions
995  #----------------------------------------------------------------------------
996  def define_Configs(self):
997  if len(self.stepMap):
998  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
999  if self._options.particleTable not in defaultOptions.particleTableList:
1000  print('Invalid particle table provided. Options are:')
1001  print(defaultOptions.particleTable)
1002  sys.exit(-1)
1003  else:
1004  if len(self.stepMap):
1005  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
1006 
1007  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
1008 
1009  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
1010  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
1011  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
1012  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
1013  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
1014  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
1015  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
1016  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
1017  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
1018  if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
1019  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
1020  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
1021  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
1022  self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
1023  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
1024  self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
1025  self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
1026  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
1027  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
1028  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
1029  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
1030  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
1031  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
1032  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
1033  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
1034  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
1035  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
1036  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
1038  if "DATAMIX" in self.stepMap.keys():
1039  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
1040  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
1041  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
1042  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
1043 
1044  self.ALCADefaultSeq=None
1045  self.LHEDefaultSeq='externalLHEProducer'
1046  self.GENDefaultSeq='pgen'
1047  self.SIMDefaultSeq='psim'
1048  self.DIGIDefaultSeq='pdigi'
1050  self.DIGI2RAWDefaultSeq='DigiToRaw'
1051  self.HLTDefaultSeq='GRun'
1052  self.L1DefaultSeq=None
1057  self.RAW2DIGIDefaultSeq='RawToDigi'
1058  self.L1RecoDefaultSeq='L1Reco'
1059  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1060  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1061  self.RECODefaultSeq='reconstruction'
1062  else:
1063  self.RECODefaultSeq='reconstruction_fromRECO'
1064  self.RECOSIMDefaultSeq='recosim'
1066  self.L1HwValDefaultSeq='L1HwVal'
1067  self.DQMDefaultSeq='DQMOffline'
1069  self.ENDJOBDefaultSeq='endOfProcess'
1070  self.REPACKDefaultSeq='DigiToRawRepack'
1071  self.PATDefaultSeq='miniAOD'
1072  self.PATGENDefaultSeq='miniGEN'
1073  #TODO: Check based of file input
1074  self.NANOGENDefaultSeq='nanogenSequence'
1075  self.NANODefaultSeq='nanoSequence'
1077  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1079  if not self._options.beamspot:
1080  self._options.beamspot=VtxSmearedDefaultKey
1081 
1082  # if its MC then change the raw2digi
1083  if self._options.isMC==True:
1084  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1085  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1086  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1087  self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1088  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1089  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1090  self.NANODefaultSeq='nanoSequenceMC'
1091  else:
1092  self._options.beamspot = None
1093 
1094  #patch for gen, due to backward incompatibility
1095  if 'reGEN' in self.stepMap:
1096  self.GENDefaultSeq='fixGenInfo'
1097 
1098  if self._options.scenario=='cosmics':
1099  self._options.pileup='Cosmics'
1100  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1101  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1102  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1103  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1104  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1105  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1106  if self._options.isMC==True:
1107  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1108  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1109  self.RECODefaultSeq='reconstructionCosmics'
1110  self.DQMDefaultSeq='DQMOfflineCosmics'
1111 
1112  if self._options.scenario=='HeavyIons':
1113  if not self._options.beamspot:
1114  self._options.beamspot=VtxSmearedHIDefaultKey
1115  self.HLTDefaultSeq = 'HIon'
1116  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1117  self.VALIDATIONDefaultSeq=''
1118  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1119  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1120  self.RECODefaultSeq='reconstructionHeavyIons'
1121  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1122  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1123  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1124  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1125  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1126  if self._options.isMC==True:
1127  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1128 
1129 
1132  self.USERDefaultSeq='user'
1133  self.USERDefaultCFF=None
1135  # the magnetic field
1136  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1137  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1138 
1139  # the geometry
1140  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1142  simGeometry=''
1143  if self._options.fast:
1144  if 'start' in self._options.conditions.lower():
1145  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1146  else:
1147  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1148  else:
1149  def inGeometryKeys(opt):
1150  from Configuration.StandardSequences.GeometryConf import GeometryConf
1151  if opt in GeometryConf:
1152  return GeometryConf[opt]
1153  else:
1154  return opt
1155 
1156  geoms=self._options.geometry.split(',')
1157  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1158  if len(geoms)==2:
1159  #may specify the reco geometry
1160  if '/' in geoms[1] or '_cff' in geoms[1]:
1161  self.GeometryCFF=geoms[1]
1162  else:
1163  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1164 
1165  if (geoms[0].startswith('DB:')):
1166  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1167  self.geometryDBLabel=geoms[0][3:]
1168  print("with DB:")
1169  else:
1170  if '/' in geoms[0] or '_cff' in geoms[0]:
1171  self.SimGeometryCFF=geoms[0]
1172  else:
1173  simGeometry=geoms[0]
1174  if self._options.gflash==True:
1175  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1176  else:
1177  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1178 
1179  # synchronize the geometry configuration and the FullSimulation sequence to be used
1180  if simGeometry not in defaultOptions.geometryExtendedOptions:
1181  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1182 
1183  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1184  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1185  self._options.beamspot='NoSmear'
1186 
1187  # fastsim requires some changes to the default cff files and sequences
1188  if self._options.fast:
1189  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1190  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1191  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1192  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1193  self.NANODefaultSeq = 'nanoSequenceFS'
1194  self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1195 
1196  # Mixing
1197  if self._options.pileup=='default':
1198  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1199  self._options.pileup=MixingDefaultKey
1200 
1201 
1202  #not driven by a default cff anymore
1203  if self._options.isData:
1204  self._options.pileup=None
1205 
1206 
1209  # for alca, skims, etc
1210  def addExtraStream(self, name, stream, workflow='full'):
1211  # define output module and go from there
1212  output = cms.OutputModule("PoolOutputModule")
1213  if stream.selectEvents.parameters_().__len__()!=0:
1214  output.SelectEvents = stream.selectEvents
1215  else:
1216  output.SelectEvents = cms.untracked.PSet()
1217  output.SelectEvents.SelectEvents=cms.vstring()
1218  if isinstance(stream.paths,tuple):
1219  for path in stream.paths:
1220  output.SelectEvents.SelectEvents.append(path.label())
1221  else:
1222  output.SelectEvents.SelectEvents.append(stream.paths.label())
1223 
1224 
1225 
1226  if isinstance(stream.content,str):
1227  evtPset=getattr(self.process,stream.content)
1228  for p in evtPset.parameters_():
1229  setattr(output,p,getattr(evtPset,p))
1230  if not self._options.inlineEventContent:
1231  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1232  return label
1233  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1234  else:
1235  output.outputCommands = stream.content
1236 
1237 
1238  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1239 
1240  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1241  filterName = cms.untracked.string(stream.name))
1242 
1243  if self._options.filtername:
1244  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1245 
1246  #add an automatic flushing to limit memory consumption
1247  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1248 
1249  if workflow in ("producers,full"):
1250  if isinstance(stream.paths,tuple):
1251  for path in stream.paths:
1252  self.schedule.append(path)
1253  else:
1254  self.schedule.append(stream.paths)
1255 
1256 
1257  # in case of relvals we don't want to have additional outputs
1258  if (not self._options.relval) and workflow in ("full","output"):
1259  self.additionalOutputs[name] = output
1260  setattr(self.process,name,output)
1261 
1262  if workflow == 'output':
1263  # adjust the select events to the proper trigger results from previous process
1264  filterList = output.SelectEvents.SelectEvents
1265  for i, filter in enumerate(filterList):
1266  filterList[i] = filter+":"+self._options.triggerResultsProcess
1267 
1268  return output
1269 
1270  #----------------------------------------------------------------------------
1271  # here the methods to create the steps. Of course we are doing magic here ;)
1272  # prepare_STEPNAME modifies self.process and what else's needed.
1273  #----------------------------------------------------------------------------
1274 
1275  def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
1276  _dotsplit = stepSpec.split('.')
1277  if ( len(_dotsplit)==1 ):
1278  if '/' in _dotsplit[0]:
1279  _sequence = defaultSEQ if defaultSEQ else stepSpec
1280  _cff = _dotsplit[0]
1281  else:
1282  _sequence = stepSpec
1283  _cff = defaultCFF
1284  elif ( len(_dotsplit)==2 ):
1285  _cff,_sequence = _dotsplit
1286  else:
1287  print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1288  print(stepSpec,"not recognized")
1289  raise
1290  l=self.loadAndRemember(_cff)
1291  return l,_sequence,_cff
1292 
1293  def scheduleSequence(self,seq,prefix,what='Path'):
1294  if '*' in seq:
1295  #create only one path with all sequences in it
1296  for i,s in enumerate(seq.split('*')):
1297  if i==0:
1298  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1299  else:
1300  p=getattr(self.process,prefix)
1301  tmp = getattr(self.process, s)
1302  if isinstance(tmp, cms.Task):
1303  p.associate(tmp)
1304  else:
1305  p+=tmp
1306  self.schedule.append(getattr(self.process,prefix))
1307  return
1308  else:
1309  #create as many path as many sequences
1310  if not '+' in seq:
1311  if self.nextScheduleIsConditional:
1312  self.conditionalPaths.append(prefix)
1313  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1314  self.schedule.append(getattr(self.process,prefix))
1315  else:
1316  for i,s in enumerate(seq.split('+')):
1317  sn=prefix+'%d'%(i)
1318  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1319  self.schedule.append(getattr(self.process,sn))
1320  return
1321 
1322  def scheduleSequenceAtEnd(self,seq,prefix):
1323  self.scheduleSequence(seq,prefix,what='EndPath')
1324  return
1325 
1326  def prepare_ALCAPRODUCER(self, stepSpec = None):
1327  self.prepare_ALCA(stepSpec, workflow = "producers")
1328 
1329  def prepare_ALCAOUTPUT(self, stepSpec = None):
1330  self.prepare_ALCA(stepSpec, workflow = "output")
1331 
    def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
        """ Enrich the process with alca streams.

        stepSpec  -- '+'-separated list of ALCARECO names (supports @X
                     shortcuts from autoAlca)
        workflow  -- passed through to addExtraStream ('full', 'producers'
                     or 'output')
        Raises Exception for over-long or unknown ALCARECO names.
        """
        alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

        MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        for alca in alcaList:
            if (len(alca)>MAXLEN):
                raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

        maxLevel=0
        from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        # scan every attribute of the alca config for matching streams
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                if shortName in AlCaNoConcurrentLumis:
                    # some AlCa sequences cannot run with concurrent lumi blocks
                    print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                    self._options.nConcurrentLumis = "1"
                    self._options.nConcurrentIOVs = "1"
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # remove every request for this stream (duplicates included)
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            # NOTE(review): this check runs for every attribute, not only for
            # the two branches above — confirmed against upstream layout
            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)


        if len(alcaList) != 0:
            # some requested streams were not found: report what exists and fail
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print("The following alcas could not be found "+str(alcaList))
            print("available ",available)
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1397 
1398  def prepare_LHE(self, stepSpec = None):
1399  #load the fragment
1400 
1401  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1402  print("Loading lhe fragment from",loadFragment)
1403  __import__(loadFragment)
1404  self.process.load(loadFragment)
1405 
1406  self._options.inlineObjets+=','+stepSpec
1407 
1408  getattr(self.process,stepSpec).nEvents = int(self._options.number)
1409 
1410  #schedule it
1411  self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1412  self.excludedPaths.append("lhe_step")
1413  self.schedule.append( self.process.lhe_step )
1414 
    def prepare_GEN(self, stepSpec = None):
        """ load the fragment of generator configuration.

        Imports the --evt_type fragment (if any), exposes its modules to the
        dumped configuration, loads vertex smearing and (for HeavyIons) the
        generator mixing config, then schedules 'generation_step' and, unless
        this is a reGEN job, the gen-filter summary endpath.
        """
        loadFailure=False
        #remove trailing .py
        #support old style .cfi by changing into something.cfi into something_cfi
        #remove python/ from the name
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
        #standard location of fragments
        if not '/' in loadFragment:
            loadFragment='Configuration.Generator.'+loadFragment
        else:
            loadFragment=loadFragment.replace('/','.')
        try:
            print("Loading generator fragment from",loadFragment)
            __import__(loadFragment)
        except:
            # NOTE(review): deliberately broad — any import problem is treated
            # as "no fragment"; it is only fatal when no input file is given
            loadFailure=True
            #if self.process.source and self.process.source.type_()=='EmptySource':
            if not (self._options.filein or self._options.dasquery):
                raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

        if not loadFailure:
            from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators

            generatorModule=sys.modules[loadFragment]
            genModules=generatorModule.__dict__
            #remove lhe producer module since this should have been
            #imported instead in the LHE step
            if self.LHEDefaultSeq in genModules:
                del genModules[self.LHEDefaultSeq]

            if self._options.hideGen:
                self.loadAndRemember(loadFragment)
            else:
                self.process.load(loadFragment)
                # expose the objects from that fragment to the configuration
                import FWCore.ParameterSet.Modules as cmstypes
                for name in genModules:
                    theObject = getattr(generatorModule,name)
                    if isinstance(theObject, cmstypes._Module):
                        self._options.inlineObjets=name+','+self._options.inlineObjets
                        if theObject.type_() in noConcurrentLumiGenerators:
                            # some generators cannot run with concurrent lumi blocks
                            print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
                            self._options.nConcurrentLumis = "1"
                            self._options.nConcurrentIOVs = "1"
                    elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                        self._options.inlineObjets+=','+name

            # remember the filter sequence used to veto non-selected events
            if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly' or stepSpec == 'pgen_smear':
                if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                    self.productionFilterSequence = 'ProductionFilterSequence'
                elif 'generator' in genModules:
                    self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)

        if True:
            try:
                from Configuration.StandardSequences.VtxSmeared import VtxSmeared
                cffToBeLoaded=VtxSmeared[self._options.beamspot]
                self.loadAndRemember(cffToBeLoaded)
            except ImportError:
                raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

            if self._options.scenario == 'HeavyIons':
                if self._options.pileup=='HiMixGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
                elif self._options.pileup=='HiMixEmbGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
                else:
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
        self.schedule.append(self.process.generation_step)

        #register to the genstepfilter the name of the path (static right now, but might evolve)
        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:
            #stop here
            return

        """ Enrich the schedule with the summary of the filter step """
        #the gen filter in the endpath
        self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
        self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
        return
1503 
1504  def prepare_SIM(self, stepSpec = None):
1505  """ Enrich the schedule with the simulation step"""
1506  _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
1507  if not self._options.fast:
1508  if self._options.gflash==True:
1509  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1510 
1511  if self._options.magField=='0T':
1512  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1513  else:
1514  if self._options.magField=='0T':
1515  self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1516 
1517  self.scheduleSequence(_simSeq,'simulation_step')
1518  return
1519 
1520  def prepare_DIGI(self, stepSpec = None):
1521  """ Enrich the schedule with the digitisation step"""
1522  _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)
1523 
1524  if self._options.gflash==True:
1525  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1526 
1527  if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
1528  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1529 
1530  if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1531  if self._options.inputEventContent=='':
1532  self._options.inputEventContent='REGEN'
1533  else:
1534  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1535 
1536 
1537  self.scheduleSequence(_digiSeq,'digitisation_step')
1538  return
1539 
1540  def prepare_CFWRITER(self, stepSpec = None):
1541  """ Enrich the schedule with the crossing frame writer step"""
1543  self.scheduleSequence('pcfw','cfwriter_step')
1544  return
1545 
1546  def prepare_DATAMIX(self, stepSpec = None):
1547  """ Enrich the schedule with the digitisation step"""
1549  self.scheduleSequence('pdatamix','datamixing_step')
1550 
1551  if self._options.pileup_input:
1552  theFiles=''
1553  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1554  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1555  elif self._options.pileup_input.startswith("filelist:"):
1556  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1557  else:
1558  theFiles=self._options.pileup_input.split(',')
1559  #print theFiles
1560  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1561 
1562  return
1563 
1564  def prepare_DIGI2RAW(self, stepSpec = None):
1565  _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1566  self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1567  return
1568 
1569  def prepare_REPACK(self, stepSpec = None):
1570  _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1571  self.scheduleSequence(_repackSeq,'digi2repack_step')
1572  return
1573 
1574  def prepare_L1(self, stepSpec = None):
1575  """ Enrich the schedule with the L1 simulation step"""
1576  assert(stepSpec == None)
1577  self.loadAndRemember(self.L1EMDefaultCFF)
1578  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1579  return
1580 
1581  def prepare_L1REPACK(self, stepSpec = None):
1582  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1583  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1584  if stepSpec in supported:
1585  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1586  if self._options.scenario == 'HeavyIons':
1587  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1588  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1589  else:
1590  print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1591  raise Exception('unsupported feature')
1592 
    def prepare_HLT(self, stepSpec = None):
        """ Enrich the schedule with the HLT simulation step.

        stepSpec forms handled:
          '@key'          -> resolved through Configuration.HLT.autoHLT
          'a,b'           -> menu loaded at runtime via process.loadHltConfiguration
          'run,fromSource'-> menu looked up from the run number of the input source
          anything else   -> static cff HLT_<stepSpec>_cff is loaded
        """
        if not stepSpec:
            print("no specification of the hlt menu has been given, should never happen")
            raise Exception('no HLT specifications provided')

        if '@' in stepSpec:
            # case where HLT:@something was provided
            from Configuration.HLT.autoHLT import autoHLT
            key = stepSpec[1:]
            if key in autoHLT:
                stepSpec = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in stepSpec:
            #case where HLT:something:something was provided
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the options dict as 'key=repr(val), ...' for the dumped command
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
            if stepSpec == 'run,fromSource':
                # pick the run number from whichever attribute the source provides
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception(f'Cannot replace menu to load {stepSpec}')
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
        else:
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            # running HLT under a different process name: rename both in the
            # in-memory process and in the dumped configuration
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        if self.process.schedule == None:
            raise Exception('the HLT step did not attach a valid schedule to the process')

        self.scheduleIndexOfFirstHLTPath = len(self.schedule)
        # keep HLT paths out of the dumped cfg: they come from the loaded menu
        [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

        # this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1650 
1651  def prepare_RAW2RECO(self, stepSpec = None):
1652  if ','in stepSpec:
1653  seqReco,seqDigi=stepSpec.spli(',')
1654  else:
1655  print(f"RAW2RECO requires two specifications {stepSpec} insufficient")
1656 
1657  self.prepare_RAW2DIGI(seqDigi)
1658  self.prepare_RECO(seqReco)
1659  return
1660 
1661  def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1662  _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1663  self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1664  return
1665 
1666  def prepare_PATFILTER(self, stepSpec = None):
1667  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1668  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1669  for filt in allMetFilterPaths:
1670  self.schedule.append(getattr(self.process,'Flag_'+filt))
1671 
1672  def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1673  ''' Enrich the schedule with L1 HW validation '''
1674  self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1675  print('\n\n\n DEPRECATED this has no action \n\n\n')
1676  return
1677 
1678  def prepare_L1Reco(self, stepSpec = "L1Reco"):
1679  ''' Enrich the schedule with L1 reconstruction '''
1680  _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1681  self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1682  return
1683 
1684  def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1685  ''' Enrich the schedule with L1 reconstruction '''
1686  _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1687  self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1688  return
1689 
    def prepare_FILTER(self, stepSpec = None):
        ''' Enrich the schedule with a user defined filter sequence '''
        # stepSpec is '<configFragment>.<sequenceName>'
        filterConfig,filterSeq = stepSpec.split('.')
        filterConfig=self.load(filterConfig)

        # visitor that collects the labels of all modules in the filter
        # sequence so they can be inlined into the dumped configuration
        class PrintAllModules(object):
            def __init__(self):
                self.inliner=''
                pass
            def enter(self,visitee):
                try:
                    label=visitee.label()
                    # prepend: the inlined objects end up in reverse visit order
                    self.inliner=label+','+self.inliner
                except:
                    # unlabeled entries are simply skipped (deliberate best-effort)
                    pass
            def leave(self,v): pass

        expander=PrintAllModules()
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        # schedule the filter and make the following steps conditional on it
        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True
        # record it as the production filter sequence
        self.productionFilterSequence = filterSeq

        return
1721 
1722  def prepare_RECO(self, stepSpec = "reconstruction"):
1723  ''' Enrich the schedule with reconstruction '''
1724  _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1725  self.scheduleSequence(_recoSeq,'reconstruction_step')
1726  return
1727 
1728  def prepare_RECOSIM(self, stepSpec = "recosim"):
1729  ''' Enrich the schedule with reconstruction '''
1730  _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1731  self.scheduleSequence(_recosimSeq,'recosim_step')
1732  return
1733 
1734  def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
1735  ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
1736  if not self._options.fast:
1737  print("ERROR: this step is only implemented for FastSim")
1738  sys.exit()
1739  _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
1740  self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
1741  return
1742 
    def prepare_PAT(self, stepSpec = "miniAOD"):
        ''' Enrich the schedule with PAT '''
        # schedule the MET-filter flag paths alongside PAT
        self.prepare_PATFILTER(self)
        self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
        # PAT runs as an unscheduled task
        self.labelsToAssociate.append('patTask')
        # pick the miniAOD customisation matching data / MC / FastSim
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            # point the trigger-related consumers at the non-default HLT process name
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
            self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

#       self.renameHLTprocessInSequence(sequence)

        return
1766 
1767  def prepare_PATGEN(self, stepSpec = "miniGEN"):
1768  ''' Enrich the schedule with PATGEN '''
1769  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
1770  self.labelsToAssociate.append('patGENTask')
1771  if self._options.isData:
1772  raise Exception("PATGEN step can only run on MC")
1773  return
1774 
1775  def prepare_NANO(self, stepSpec = '' ):
1776  print(f"in prepare_nano {stepSpec}")
1777  ''' Enrich the schedule with NANO '''
1778  _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1779  self.scheduleSequence(_nanoSeq,'nanoAOD_step')
1780  custom = "nanoAOD_customizeCommon"
1781  self._options.customisation_file.insert(0,'.'.join([_nanoCff,custom]))
1782  if self._options.hltProcess:
1783  if len(self._options.customise_commands) > 1:
1784  self._options.customise_commands = self._options.customise_commands + " \n"
1785  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1786 
1787  def prepare_NANOGEN(self, stepSpec = "nanoAOD"):
1788  ''' Enrich the schedule with NANOGEN '''
1789  # TODO: Need to modify this based on the input file type
1790  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1791  _,_nanogenSeq,_nanogenCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANOGENDefaultCFF)
1792  self.scheduleSequence(_nanogenSeq,'nanoAOD_step')
1793  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1794  if self._options.runUnscheduled:
1795  self._options.customisation_file_unsch.insert(0, '.'.join([_nanogenCff, custom]))
1796  else:
1797  self._options.customisation_file.insert(0, '.'.join([_nanogenCff, custom]))
1798 
    def prepare_SKIM(self, stepSpec = "all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)

        # re-HLT customisation applies when a non-standard HLT process name is
        # requested or HLT itself is being re-run in this job
        stdHLTProcName = 'HLT'
        newHLTProcName = self._options.hltProcess
        customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
        if customiseForReHLT:
            print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))

        # expand '@'-aliases of the requested skim list
        from Configuration.Skimming.autoSkim import autoSkim
        skimlist = sequence.split('+')
        self.expandMapping(skimlist,autoSkim)

        #print("dictionary for skims:", skimConfig.__dict__)
        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig, skim)

            # blacklist AlCa paths so that they do not appear in the cfg
            if isinstance(skimstream, cms.Path):
                self.blacklist_paths.append(skimstream)
            # if enabled, apply "hltProcess" renaming to Sequences
            elif isinstance(skimstream, cms.Sequence):
                if customiseForReHLT:
                    self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)

            # only FilteredStream attributes define actual skim output streams
            if not isinstance(skimstream, cms.FilteredStream):
                continue

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                #add a DQM eventcontent for this guy
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                # remove every occurrence of this (now handled) skim from the request list
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        # anything left in skimlist was requested but never matched a stream
        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1853 
1854  def prepare_USER(self, stepSpec = None):
1855  ''' Enrich the schedule with a user defined sequence '''
1856  _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1857  self.scheduleSequence(_userSeq,'user_step')
1858  return
1859 
1860  def prepare_POSTRECO(self, stepSpec = None):
1861  """ Enrich the schedule with the postreco step """
1863  self.scheduleSequence('postreco_generator','postreco_step')
1864  return
1865 
1866 
1867  def prepare_VALIDATION(self, stepSpec = 'validation'):
1868  print(f"{stepSpec} in preparing validation")
1869  _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1870  from Validation.Configuration.autoValidation import autoValidation
1871  #in case VALIDATION:something:somethingelse -> something,somethingelse
1872  if sequence.find(',')!=-1:
1873  prevalSeqName=sequence.split(',')[0].split('+')
1874  valSeqName=sequence.split(',')[1].split('+')
1875  self.expandMapping(prevalSeqName,autoValidation,index=0)
1876  self.expandMapping(valSeqName,autoValidation,index=1)
1877  else:
1878  if '@' in sequence:
1879  prevalSeqName=sequence.split('+')
1880  valSeqName=sequence.split('+')
1881  self.expandMapping(prevalSeqName,autoValidation,index=0)
1882  self.expandMapping(valSeqName,autoValidation,index=1)
1883  else:
1884  postfix=''
1885  if sequence:
1886  postfix='_'+sequence
1887  prevalSeqName=['prevalidation'+postfix]
1888  valSeqName=['validation'+postfix]
1889  if not hasattr(self.process,valSeqName[0]):
1890  prevalSeqName=['']
1891  valSeqName=[sequence]
1892 
1893  def NFI(index):
1894 
1895  if index==0:
1896  return ''
1897  else:
1898  return '%s'%index
1899 
1900 
1901  #rename the HLT process in validation steps
1902  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1903  for s in valSeqName+prevalSeqName:
1904  if s:
1906  for (i,s) in enumerate(prevalSeqName):
1907  if s:
1908  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1909  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1910 
1911  for (i,s) in enumerate(valSeqName):
1912  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1913  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1914 
1915  #needed in case the miniAODValidation sequence is run starting from AODSIM
1916  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1917  return
1918 
1919  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1920  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1921  self._options.restoreRNDSeeds=True
1922 
1923  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1924  self.executeAndRemember("process.mix.playback = True")
1925  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1926  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1927  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1928 
1929  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1930  #will get in the schedule, smoothly
1931  for (i,s) in enumerate(valSeqName):
1932  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1933 
1934  return
1935 
1936 
1938  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1939  It will climb down within PSets, VPSets and VInputTags to find its target"""
1940  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1941  self._paramReplace = paramReplace
1942  self._paramSearch = paramSearch
1943  self._verbose = verbose
1944  self._whitelist = whitelist
        def doIt(self, pset, base):
            """Recursively scan *pset* (a cms parameterizable) and replace every
            occurrence of the search string; *base* is the dotted path used in
            verbose printout."""
            if isinstance(pset, cms._Parameterizable):
                for name in pset.parameters_().keys():
                    # skip whitelisted parameters
                    if name in self._whitelist:
                        continue
                    # if I use pset.parameters_().items() I get copies of the parameter values
                    # so I can't modify the nested pset
                    value = getattr(pset, name)
                    valueType = type(value)
                    # nested PSets (and EDProducers) are recursed into
                    if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
                        self.doIt(value,base+"."+name)
                    elif valueType in [cms.VPSet, cms.untracked.VPSet]:
                        for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                    elif valueType in [cms.string, cms.untracked.string]:
                        # plain strings are replaced only on exact match
                        if value.value() == self._paramSearch:
                            if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
                            setattr(pset, name,self._paramReplace)
                    elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
                        for (i,n) in enumerate(value):
                            if not isinstance(n, cms.InputTag):
                                n=cms.InputTag(n)
                            if n.processName == self._paramSearch:
                                # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                                if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
                                setattr(n,"processName",self._paramReplace)
                                value[i]=n
                    elif valueType in [cms.vstring, cms.untracked.vstring]:
                        for (i,n) in enumerate(value):
                            if n==self._paramSearch:
                                getattr(pset,name)[i]=self._paramReplace
                    elif valueType in [cms.InputTag, cms.untracked.InputTag]:
                        # single InputTag: only its processName field is rewritten
                        if value.processName == self._paramSearch:
                            if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
                            setattr(getattr(pset, name),"processName",self._paramReplace)
1981 
1982  def enter(self,visitee):
1983  label = ''
1984  try:
1985  label = visitee.label()
1986  except AttributeError:
1987  label = '<Module not in a Process>'
1988  except:
1989  label = 'other execption'
1990  self.doIt(visitee, label)
1991 
1992  def leave(self,visitee):
1993  pass
1994 
1995  #visit a sequence to repalce all input tags
1996  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1997  print("Replacing all InputTag %s => %s"%(oldT,newT))
1998  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1999  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
2000  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
2001  if not loadMe in self.additionalCommands:
2002  self.additionalCommands.append(loadMe)
2003  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2004 
2005  #change the process name used to address HLT results in any sequence
2006  def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2007  if proc == None:
2008  proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2009  if proc == HLTprocess:
2010  return
2011  # look up all module in sequence
2012  if verbosityLevel > 0:
2013  print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2014  verboseVisit = (verbosityLevel > 1)
2015  getattr(self.process,sequence).visit(
2016  ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2017  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2018  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2020  'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2021  % (sequence, HLTprocess, proc, verboseVisit))
2022 
2023  def expandMapping(self,seqList,mapping,index=None):
2024  maxLevel=30
2025  level=0
2026  while '@' in repr(seqList) and level<maxLevel:
2027  level+=1
2028  for specifiedCommand in seqList:
2029  if specifiedCommand.startswith('@'):
2030  location=specifiedCommand[1:]
2031  if not location in mapping:
2032  raise Exception("Impossible to map "+location+" from "+repr(mapping))
2033  mappedTo=mapping[location]
2034  if index!=None:
2035  mappedTo=mappedTo[index]
2036  seqList.remove(specifiedCommand)
2037  seqList.extend(mappedTo.split('+'))
2038  break;
2039  if level==maxLevel:
2040  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2041 
    def prepare_DQM(self, stepSpec = 'DQMOffline'):
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
        # expand '@'-aliases: autoDQM index 0 gives the DQM sequences,
        # index 1 the post-PAT ("OnPAT") counterparts
        sequenceList=_dqmSeq.split('+')
        postSequenceList=_dqmSeq.split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        # drop duplicates while keeping first-seen order
        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(OrderedSet(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,_sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(_sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,_sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2084 
2085  def prepare_HARVESTING(self, stepSpec = None):
2086  """ Enrich the process with harvesting step """
2087  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2089 
2090  harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2091 
2092  # decide which HARVESTING paths to use
2093  harvestingList = sequence.split("+")
2094  from DQMOffline.Configuration.autoDQM import autoDQM
2095  from Validation.Configuration.autoValidation import autoValidation
2096  import copy
2097  combined_mapping = copy.deepcopy( autoDQM )
2098  combined_mapping.update( autoValidation )
2099  self.expandMapping(harvestingList,combined_mapping,index=-1)
2100 
2101  if len(set(harvestingList))!=len(harvestingList):
2102  harvestingList=list(OrderedSet(harvestingList))
2103  print("Duplicate entries for HARVESTING, using",harvestingList)
2104 
2105  for name in harvestingList:
2106  if not name in harvestingConfig.__dict__:
2107  print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2108  # trigger hard error, like for other sequence types
2109  getattr(self.process, name)
2110  continue
2111  harvestingstream = getattr(harvestingConfig,name)
2112  if isinstance(harvestingstream,cms.Path):
2113  self.schedule.append(harvestingstream)
2114  self.blacklist_paths.append(harvestingstream)
2115  if isinstance(harvestingstream,cms.Sequence):
2116  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2117  self.schedule.append(getattr(self.process,name+"_step"))
2118 
2119  # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
2120  # if self._options.hltProcess or ('HLT' in self.stepMap):
2121  # pass
2122 
2123  self.scheduleSequence('DQMSaver','dqmsave_step')
2124  return
2125 
    def prepare_ALCAHARVEST(self, stepSpec = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        # only the part after the last '.' selects the harvesting sequences
        sequence=stepSpec.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        # expand '@'-aliases via the PCL auto mapping
        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # register the DB payload and its metadata; VPSets are extended,
                # single PSets appended
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over was requested but does not exist in the config
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2158 
2159 
2160 
2161  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2162  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2163  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2164  return
2165 
2166  def finalizeFastSimHLT(self):
2167  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2168  self.schedule.append(self.process.reconstruction)
2169 
2170 
2171  def build_production_info(self, evt_type, evtnumber):
2172  """ Add useful info for the production. """
2173  self.process.configurationMetadata=cms.untracked.PSet\
2174  (version=cms.untracked.string("$Revision: 1.19 $"),
2175  name=cms.untracked.string("Applications"),
2176  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2177  )
2178 
2179  self.addedObjects.append(("Production Info","configurationMetadata"))
2180 
2181 
    def create_process(self):
        """Create self.process (a cms.Process carrying all requested era and
        process modifiers) and start accumulating the dumped python
        configuration text in self.pythonCfgCode."""
        self.pythonCfgCode = "# Auto generated configuration file\n"
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        # now set up the modifiers
        modifiers=[]
        modifierStrings=[]
        modifierImports=[]

        if hasattr(self._options,"era") and self._options.era :
            # Multiple eras can be specified in a comma separated list
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(",") :
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras,requestedEra))


        if hasattr(self._options,"procModifiers") and self._options.procModifiers:
            import importlib
            # each entry may itself be a comma-separated list of modifier names
            thingsImported=[]
            for c in self._options.procModifiers:
                thingsImported.extend(c.split(","))
            for pm in thingsImported:
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))

        self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop


        if len(modifierStrings)>0:
            self.pythonCfgCode+= ','+','.join(modifierStrings)
        self.pythonCfgCode+=')\n\n'

        #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
        #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
        if self.process == None:
            if len(modifiers)>0:
                self.process = cms.Process(self._options.name,*modifiers)
            else:
                self.process = cms.Process(self._options.name)
2227 
2228 
2229 
2230 
2231  def prepare(self, doChecking = False):
2232  """ Prepare the configuration string and add missing pieces."""
2233 
2234  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2235  self.addMaxEvents()
2236  if self.with_input:
2237  self.addSource()
2238  self.addStandardSequences()
2239 
2240  self.completeInputCommand()
2241  self.addConditions()
2242 
2243 
2244  outputModuleCfgCode=""
2245  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2246  outputModuleCfgCode=self.addOutput()
2247 
2248  self.addCommon()
2249 
2250  self.pythonCfgCode += "# import of standard configurations\n"
2251  for module in self.imports:
2252  self.pythonCfgCode += ("process.load('"+module+"')\n")
2253 
2254  # production info
2255  if not hasattr(self.process,"configurationMetadata"):
2256  self.build_production_info(self._options.evt_type, self._options.number)
2257  else:
2258  #the PSet was added via a load
2259  self.addedObjects.append(("Production Info","configurationMetadata"))
2260 
2261  self.pythonCfgCode +="\n"
2262  for comment,object in self.addedObjects:
2263  if comment!="":
2264  self.pythonCfgCode += "\n# "+comment+"\n"
2265  self.pythonCfgCode += dumpPython(self.process,object)
2266 
2267  # dump the output definition
2268  self.pythonCfgCode += "\n# Output definition\n"
2269  self.pythonCfgCode += outputModuleCfgCode
2270 
2271  # dump all additional outputs (e.g. alca or skim streams)
2272  self.pythonCfgCode += "\n# Additional output definition\n"
2273  #I do not understand why the keys are not normally ordered.
2274  nl=sorted(self.additionalOutputs.keys())
2275  for name in nl:
2276  output = self.additionalOutputs[name]
2277  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2278  tmpOut = cms.EndPath(output)
2279  setattr(self.process,name+'OutPath',tmpOut)
2280  self.schedule.append(tmpOut)
2281 
2282  # dump all additional commands
2283  self.pythonCfgCode += "\n# Other statements\n"
2284  for command in self.additionalCommands:
2285  self.pythonCfgCode += command + "\n"
2286 
2287  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2288  for object in self._options.inlineObjets.split(','):
2289  if not object:
2290  continue
2291  if not hasattr(self.process,object):
2292  print('cannot inline -'+object+'- : not known')
2293  else:
2294  self.pythonCfgCode +='\n'
2295  self.pythonCfgCode +=dumpPython(self.process,object)
2296 
2297  if self._options.pileup=='HiMixEmbGEN':
2298  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2299 
2300  # dump all paths
2301  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2302  for path in self.process.paths:
2303  if getattr(self.process,path) not in self.blacklist_paths:
2304  self.pythonCfgCode += dumpPython(self.process,path)
2305 
2306  for endpath in self.process.endpaths:
2307  if getattr(self.process,endpath) not in self.blacklist_paths:
2308  self.pythonCfgCode += dumpPython(self.process,endpath)
2309 
2310  # dump the schedule
2311  self.pythonCfgCode += "\n# Schedule definition\n"
2312 
2313  # handling of the schedule
2314  pathNames = ['process.'+p.label_() for p in self.schedule]
2315  if self.process.schedule == None:
2316  self.process.schedule = cms.Schedule()
2317  for item in self.schedule:
2318  self.process.schedule.append(item)
2319  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2320  else:
2321  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2322  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2323 
2324  for index, item in enumerate(self.schedule):
2325  if index < self.scheduleIndexOfFirstHLTPath:
2326  self.process.schedule.insert(index, item)
2327  else:
2328  self.process.schedule.append(item)
2329 
2330  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2331  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2332  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2333  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2334  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2335 
2336  self.pythonCfgCode += result
2337 
2338  for labelToAssociate in self.labelsToAssociate:
2339  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2340  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2341 
2342  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2344  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2345  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2346 
2347  overrideThreads = (self._options.nThreads != "1")
2348  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2349  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2350 
2351  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2352  self.pythonCfgCode +="\n"
2353  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2354  if overrideThreads:
2355  self.pythonCfgCode +="process.options.numberOfThreads = "+self._options.nThreads+"\n"
2356  self.pythonCfgCode +="process.options.numberOfStreams = "+self._options.nStreams+"\n"
2357  self.process.options.numberOfThreads = int(self._options.nThreads)
2358  self.process.options.numberOfStreams = int(self._options.nStreams)
2359  if overrideConcurrentLumis:
2360  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = "+self._options.nConcurrentLumis+"\n"
2361  self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
2362  if overrideConcurrentIOVs:
2363  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = "+self._options.nConcurrentIOVs+"\n"
2364  self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)
2365 
2366  if self._options.accelerators is not None:
2367  accelerators = self._options.accelerators.split(',')
2368  self.pythonCfgCode += "\n"
2369  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2370  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2371  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2372  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2373  self.process.options.accelerators = accelerators
2374 
2375  #repacked version
2376  if self._options.isRepacked:
2377  self.pythonCfgCode +="\n"
2378  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2379  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2380  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2381 
2382  # special treatment in case of production filter sequence 2/2
2383  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2384  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2385  self.pythonCfgCode +='for path in process.paths:\n'
2386  if len(self.conditionalPaths):
2387  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2388  if len(self.excludedPaths):
2389  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2390  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2391  pfs = getattr(self.process,self.productionFilterSequence)
2392  for path in self.process.paths:
2393  if not path in self.conditionalPaths: continue
2394  if path in self.excludedPaths: continue
2395  getattr(self.process,path).insert(0, pfs)
2396 
2397 
2398  # dump customise fragment
2399  self.pythonCfgCode += self.addCustomise()
2400 
2401  if self._options.runUnscheduled:
2402  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2403  # Keep the "unscheduled customise functions" separate for now,
2404  # there are customize functions given by users (in our unit
2405  # tests) that need to be run before the "unscheduled customise
2406  # functions"
2407  self.pythonCfgCode += self.addCustomise(1)
2408 
2409  self.pythonCfgCode += self.addCustomiseCmdLine()
2410 
2411  if hasattr(self.process,"logErrorHarvester"):
2412  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2413  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2414  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2415  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2416  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2418 
2419  # Temporary hack to put the early delete customization after
2420  # everything else
2421  #
2422  # FIXME: remove when no longer needed
2423  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2424  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2425  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2426  self.pythonCfgCode += "# End adding early deletion\n"
2427  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2428  self.process = customiseEarlyDelete(self.process)
2429 
2430  imports = cms.specialImportRegistry.getSpecialImports()
2431  if len(imports) > 0:
2432  #need to inject this at the top
2433  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2434  #now find the end of line
2435  index = self.pythonCfgCode.find("\n",index)
2436  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2437 
2438 
2439  # make the .io file
2440 
2441  if self._options.io:
2442  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2443  if not self._options.io.endswith('.io'): self._option.io+='.io'
2444  io=open(self._options.io,'w')
2445  ioJson={}
2446  if hasattr(self.process.source,"fileNames"):
2447  if len(self.process.source.fileNames.value()):
2448  ioJson['primary']=self.process.source.fileNames.value()
2449  if hasattr(self.process.source,"secondaryFileNames"):
2450  if len(self.process.source.secondaryFileNames.value()):
2451  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2452  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2453  ioJson['pileup']=self._options.pileup_input[4:]
2454  for (o,om) in self.process.outputModules_().items():
2455  ioJson[o]=om.fileName.value()
2456  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2457  if self.productionFilterSequence:
2458  ioJson['filter']=self.productionFilterSequence
2459  import json
2460  io.write(json.dumps(ioJson))
2461  return
2462 
2463 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:207
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule