CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 # The following import is provided for backward compatibility reasons.
9 # The function used to be defined in this file.
10 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
11 
12 import hashlib
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 from FWCore.ParameterSet.OrderedSet import OrderedSet
class Options:
    """Simple attribute container: cmsDriver option values are attached as plain attributes."""
    pass

# the canonical defaults
defaultOptions = Options()
# --- sample type & pile-up mixing ---------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# --- geometry, field, conditions ----------------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# --- event counts and job identity --------------------------------------
defaultOptions.number = -1          # -1 means "all events"
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# --- input sources -------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# --- user customisation hooks -------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# --- file/directory handling --------------------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# --- output content ------------------------------------------------------
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjects =''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
# --- profiling / debugging ----------------------------------------------
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# --- run-dependent MC ----------------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
# --- scheduling / threading ----------------------------------------------
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = 1
defaultOptions.nStreams = 0
defaultOptions.nConcurrentLumis = 0
defaultOptions.nConcurrentIOVs = 0
defaultOptions.accelerators = None
94 
95 # some helper routines
def dumpPython(process,name):
    """Return a python snippet re-creating attribute *name* of *process*."""
    attr = getattr(process, name)
    snippet = "process." + name + " = " + attr.dumpPython()
    # Paths, EndPaths and Sequences are emitted without a trailing newline;
    # everything else (EDModules, ESProducers, ...) gets one appended.
    if isinstance(attr, (cms.Path, cms.EndPath, cms.Sequence)):
        return snippet
    return snippet + "\n"
def filesFromList(fileName,s=None):
    """Read input ROOT file names from the text file *fileName*.

    Each line may contain one .root name (primary file) or two
    (primary file followed by its parent/secondary file).  Duplicates
    are removed while preserving order.  If *s* (a cms.Source) is
    given, the names are attached to its fileNames/secondaryFileNames.

    Returns (prim, sec) lists.  Raises Exception when no primary file
    was found.
    """
    prim=[]
    sec=[]
    # 'with' guarantees the list file is closed (the original leaked the handle)
    with open(fileName,'r') as listFile:
        for line in listFile:
            if line.count(".root")>=2:
                #two files solution...
                entries=line.replace("\n","").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif (line.find(".root")!=-1):
                entry=line.replace("\n","")
                prim.append(entry)
    # remove any duplicates but keep the order
    # (set.add returns None, so the filter keeps only first occurrences)
    file_seen = set()
    prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
    file_seen = set()
    sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
    if s:
        # imported lazily: only needed when attaching to a cms.Source
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
139 
def filesFromDASQuery(query,option="",s=None):
    """Resolve *query* through the dasgoclient CLI and collect ROOT file names.

    Retries the client up to 3 times, sleeping 100 s between attempts,
    while it exits with non-zero status.  Output lines with two .root
    names are taken as (primary, parent) pairs; single-name lines are
    primaries.  Duplicates are removed (results come back sorted).  If
    *s* (a cms.Source) is given, the names are attached to it.

    Returns (prim, sec) lists of file names.
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5          # exit status; initialised non-zero so the loop runs at least once
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]          # waitpid returns (pid, exit status)
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates (NB: unlike filesFromList, order is not preserved here)
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
187 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key of *listOfKeys* present in *dict*.

    The matched key is removed from *dict* (callers rely on this to
    detect unused --output keys afterwards).  If no key matches, *opt*
    is returned when given, otherwise an Exception is raised.

    NOTE(review): the parameter name 'dict' shadows the builtin; it is
    kept unchanged for backward compatibility with keyword callers.
    """
    for k in listOfKeys:
        if k in dict:
            # pop removes the key and yields its value in one step
            return dict.pop(k)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
198 
200  """The main building routines """
201 
def __init__(self, options, process = None, with_output = False, with_input = False ):
    """options taken from old cmsDriver and optparse.

    Validates the option set, parses the comma-separated --step string
    into self.stepMap / self.stepKeys, and bootstraps the cms.Process
    via create_process() / define_Configs().
    """

    options.outfile_name = options.dirout+options.fileout

    self._options = options

    # --data and --mc are mutually exclusive
    if self._options.isData and options.isMC:
        raise Exception("ERROR: You may specify only --data or --mc, not both")
    #if not self._options.conditions:
    #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

    # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
    if 'ENDJOB' in self._options.step:
        if  (hasattr(self._options,"outputDefinition") and \
            self._options.outputDefinition != '' and \
            any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
            (hasattr(self._options,"datatier") and \
            self._options.datatier and \
            'DQMIO' in self._options.datatier):
            print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
            self._options.step=self._options.step.replace(',ENDJOB','')

    # what steps are provided by this class?
    # every prepare_<STEP> method defines a supported step name
    stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
    self.stepMap={}
    self.stepKeys=[]
    for step in self._options.step.split(","):
        if step=='': continue
        # a step spec is NAME, NAME:seq1+seq2, or NAME:alt:seq1+seq2
        stepParts = step.split(":")
        stepName = stepParts[0]
        # 're'-prefixed steps (reRECO etc.) are accepted without a prepare_ method
        if stepName not in stepList and not stepName.startswith('re'):
            raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
        if len(stepParts)==1:
            self.stepMap[stepName]=""
        elif len(stepParts)==2:
            self.stepMap[stepName]=stepParts[1].split('+')
        elif len(stepParts)==3:
            self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
        else:
            raise ValueError(f"Step definition {step} invalid")
        self.stepKeys.append(stepName)

    #print(f"map of steps is: {self.stepMap}")

    self.with_output = with_output
    self.process=process

    if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
        self.with_output = False
    self.with_input = with_input
    self.imports = []
    self.create_process()
    self.define_Configs()
    self.schedule = list()

    # we are doing three things here:
    # creating a process to catch errors
    # building the code to re-create the process

    # TODO: maybe a list of to be dumped objects would help as well
    self.blacklist_paths = []
    self.addedObjects = []
270 
276 
def profileOptions(self):
    """
    addIgProfService
    Function to add the igprof profile service so that you can dump in the middle
    of the run.

    Parses self._options.profile ('type[:start[:interval[:format]]]')
    and returns (start, interval, eventFormat, jobFormat).
    """
    tokens = self._options.profile.split(':')
    startEvent = 1
    eventInterval = 100
    eventFormat = None
    jobFormat = None

    if tokens:
        #type, given as first argument is unused here
        tokens.pop(0)
    if tokens:
        startToken = tokens.pop(0)
        if not startToken.isdigit():
            raise Exception("%s is not a number" % startToken)
        startEvent = int(startToken)
    if tokens:
        intervalToken = tokens.pop(0)
        if not intervalToken.isdigit():
            raise Exception("%s is not a number" % intervalToken)
        eventInterval = int(intervalToken)
    if tokens:
        eventFormat = tokens.pop(0)

    if not eventFormat:
        # derive a unique default file name from the job configuration
        digest = hashlib.md5(
            (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
             str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
        ).hexdigest()
        eventFormat = "%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi", ""), digest)
    if not jobFormat and eventFormat.endswith(".gz"):
        jobFormat = eventFormat.replace(".gz", "_EndOfJob.gz")
    elif not jobFormat:
        jobFormat = eventFormat + "_EndOfJob.gz"

    return (startEvent, eventInterval, eventFormat, jobFormat)
320 
def heapProfileOptions(self):
    """
    addJeProfService
    Function to add the jemalloc heap profile service so that you can dump in the middle
    of the run.

    Parses the profile specification 'type[:start[:interval[:format]]]'
    and returns (start, interval, eventFormat, jobFormat).

    NOTE(review): the 'def' line was absent from this listing and has been
    restored; the name is established by the self.heapProfileOptions() call
    in addCommon.  Also note the body parses self._options.profile, not
    self._options.heap_profile -- this mirrors profileOptions and may be a
    copy-paste; confirm which option string is intended before changing it.
    """
    profileOpts = self._options.profile.split(':')
    profilerStart = 1
    profilerInterval = 100
    profilerFormat = None
    profilerJobFormat = None

    if len(profileOpts):
        #type, given as first argument is unused here
        profileOpts.pop(0)
    if len(profileOpts):
        startEvent = profileOpts.pop(0)
        if not startEvent.isdigit():
            raise Exception("%s is not a number" % startEvent)
        profilerStart = int(startEvent)
    if len(profileOpts):
        eventInterval = profileOpts.pop(0)
        if not eventInterval.isdigit():
            raise Exception("%s is not a number" % eventInterval)
        profilerInterval = int(eventInterval)
    if len(profileOpts):
        profilerFormat = profileOpts.pop(0)

    if not profilerFormat:
        # derive a unique default file name from the job configuration
        profilerFormat = "%s___%s___%%I.heap" % (
            self._options.evt_type.replace("_cfi", ""),
            hashlib.md5(
                (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
                 str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
            ).hexdigest()
        )
    if not profilerJobFormat and profilerFormat.endswith(".heap"):
        profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
    elif not profilerJobFormat:
        profilerJobFormat = profilerFormat + "_EndOfJob.heap"

    return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
364 
def load(self,includeFile):
    """Load *includeFile* (slash- or dot-separated module path) into the
    process and return the corresponding python module object."""
    dotted = includeFile.replace('/', '.')
    self.process.load(dotted)
    return sys.modules[dotted]
369 
def loadAndRemember(self, includeFile):
    """Like load(), but also record the include on self.imports so the
    python dump of the configuration can re-create it later."""
    # we could make the imports a on-the-fly data method of the process instance itself
    # not sure if the latter is a good idea
    dotted = includeFile.replace('/', '.')
    self.imports.append(dotted)
    self.process.load(dotted)
    return sys.modules[dotted]
378 
def executeAndRemember(self, command):
    """helper routine to remember replace statements.

    Records *command* on self.additionalCommands (so it can be replayed
    in the dumped python config) and, unless it is a comment, executes
    it immediately against self.process.
    """
    self.additionalCommands.append(command)
    if not command.strip().startswith("#"):
        # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
        # (word-boundary regex so names like 'myprocess' are left alone)
        import re
        exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
        #exec(command.replace("process.","self.process."))
387 
def addCommon(self):
    """Apply job-wide settings: harvesting process options, lazy download,
    and the IgProf / JeProf profiling services when requested."""
    if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
        # harvesting jobs must fail on missing products and fully merge runs
        self.process.options.Rethrow = ['ProductNotFound']
        self.process.options.fileMode = 'FULLMERGE'

    self.addedObjects.append(("","options"))

    if self._options.lazy_download:
        self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                 stats = cms.untracked.bool(True),
                                                 enable = cms.untracked.bool(True),
                                                 cacheHint = cms.untracked.string("lazy-download"),
                                                 readHint = cms.untracked.string("read-ahead-buffered")
                                                 )
        self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

    #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
    #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))

    if self._options.profile:
        # igprof output is piped through gzip to the per-event / end-of-job files
        (start, interval, eventFormat, jobFormat)=self.profileOptions()
        self.process.IgProfService = cms.Service("IgProfService",
                                                 reportFirstEvent = cms.untracked.int32(start),
                                                 reportEventInterval = cms.untracked.int32(interval),
                                                 reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                 reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
        self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

    if self._options.heap_profile:
        (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
        self.process.JeProfService = cms.Service("JeProfService",
                                                 reportFirstEvent = cms.untracked.int32(start),
                                                 reportEventInterval = cms.untracked.int32(interval),
                                                 reportToFileAtPostEvent = cms.untracked.string("%s"%(eventFormat)),
                                                 reportToFileAtPostEndJob = cms.untracked.string("%s"%(jobFormat)))
        self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
424 
def addMaxEvents(self):
    """Here we decide how many evts will be processed"""
    opts = self._options
    self.process.maxEvents.input = opts.number
    # only cap the output when an explicit limit was requested
    if opts.number_out:
        self.process.maxEvents.output = opts.number_out
    self.addedObjects.append(("","maxEvents"))
431 
def addSource(self):
    """Here the source is built. Priority: file, generator.

    Creates self.process.source according to --filetype (EDM/DAT/LHE/
    DQM/DQMDAQ) or a DAS query, applies input commands and lumi masks,
    and configures run-dependent MC run-number assignment.
    """
    self.addedObjects.append(("Input source","source"))

    # local helper: fill process.source from the --filein / --secondfilein options
    def filesFromOption(self):
        for entry in self._options.filein.split(','):
            print("entry",entry)
            if entry.startswith("filelist:"):
                filesFromList(entry[9:],self.process.source)
            elif entry.startswith("dbs:") or entry.startswith("das:"):
                filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
            else:
                self.process.source.fileNames.append(self._options.dirin+entry)
        if self._options.secondfilein:
            if not hasattr(self.process.source,"secondaryFileNames"):
                raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
            for entry in self._options.secondfilein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                else:
                    self.process.source.secondaryFileNames.append(self._options.dirin+entry)

    if self._options.filein or self._options.dasquery:
        if self._options.filetype == "EDM":
            self.process.source=cms.Source("PoolSource",
                                           fileNames = cms.untracked.vstring(),
                                           secondaryFileNames= cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "DAT":
            self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "LHE":
            self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
            if self._options.filein.startswith("lhe:"):
                #list the article directory automatically
                args=self._options.filein.split(':')
                article=args[1]
                print('LHE input from article ',article)
                location='/store/lhe/'
                import os
                textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                for line in textOfFiles:
                    for fileName in [x for x in line.split() if '.lhe' in x]:
                        self.process.source.fileNames.append(location+article+'/'+fileName)
                    #check first if list of LHE files is loaded (not empty)
                    if len(line)<2:
                        print('Issue to load LHE files, please check and try again.')
                        sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print('Issue with empty filename, but can pass line check')
                        sys.exit(-1)
                if len(args)>2:
                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            else:
                filesFromOption(self)

        elif self._options.filetype == "DQM":
            self.process.source=cms.Source("DQMRootSource",
                                           fileNames = cms.untracked.vstring())
            filesFromOption(self)

        elif self._options.filetype == "DQMDAQ":
            # FIXME: how to configure it if there are no input files specified?
            self.process.source=cms.Source("DQMStreamerReader")


        # harvesting needs run/lumi granularity rather than per-event processing
        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

    if self._options.dasquery!='':
        self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
        filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

    ##drop LHEXMLStringProduct on input to save memory if appropriate
    if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
        if self._options.inputCommands:
            self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
        else:
            self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

    if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
        if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
        for command in self._options.inputCommands.split(','):
            # remove whitespace around the keep/drop statements
            command = command.strip()
            if command=='': continue
            self.process.source.inputCommands.append(command)
        if not self._options.dropDescendant:
            self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

    if self._options.lumiToProcess:
        import FWCore.PythonUtilities.LumiList as LumiList
        self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

    # generator jobs without an input file get an EmptySource
    if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
        if self.process.source is None:
            self.process.source=cms.Source("EmptySource")

    # modify source in case of run-dependent MC
    self.runsAndWeights=None
    if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
        if self._options.runsAndWeightsForMC:
            self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            # a string value names a module providing runProbabilityDistribution
            if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                __import__(RunsAndWeights[self._options.runsScenarioForMC])
                self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
            else:
                self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

    if self.runsAndWeights:
        import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
        # NOTE(review): the direct throwAndSetRandomRun(...) call is missing
        # from this listing; only the replayed commands are visible here.
        self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
        self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

    # modify source in case of run-dependent MC (Run-3 method)
    if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
        if self._options.runsAndWeightsForMCIntegerWeights:
            self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
            else:
                self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

    if self.runsAndWeightsInt:
        if not self._options.relval:
            raise Exception("--relval option required when using --runsAndWeightsInt")
        if 'DATAMIX' in self._options.step:
            from SimGeneral.Configuration.LumiToRun import lumi_to_run
            total_events, events_per_job  = self._options.relval.split(',')
            lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
            self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

    return
583 
def addOutput(self):
    """ Add output module to the process.

    Two configuration models are supported:
      * --output: a python list of dicts (tier/eventContent/filterName/...)
        handled by the first branch, which returns early;
      * --eventcontent/--datatier comma lists, handled by the second part.
    For each requested stream an OutputModule plus an EndPath is created
    and appended to self.schedule.  Returns the python snippet that
    re-creates the output modules.
    """
    result=""
    if self._options.outputDefinition:
        if self._options.datatier:
            print("--datatier & --eventcontent options ignored")

        #new output convention with a list of dict
        outList = eval(self._options.outputDefinition)
        for (id,outDefDict) in enumerate(outList):
            outDefDictStr=outDefDict.__str__()
            if not isinstance(outDefDict,dict):
                raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
            #requires option: tier
            theTier=anyOf(['t','tier','dataTier'],outDefDict)
            #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename

            theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
            theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
            theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
            theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
            theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
            # module label has a particular role
            if not theModuleLabel:
                # try increasingly specific label candidates until one is free
                tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                          ]
                for name in tryNames:
                    if not hasattr(self.process,name):
                        theModuleLabel=name
                        break
            if not theModuleLabel:
                raise Exception("cannot find a module label for specification: "+outDefDictStr)
            if id==0:
                defaultFileName=self._options.outfile_name
            else:
                defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

            theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
            if not theFileName.endswith('.root'):
                theFileName+='.root'

            # anyOf pops recognised keys, so anything left over is a typo
            if len(outDefDict):
                raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
            if theStreamType=='DQMIO': theStreamType='DQM'
            if theStreamType=='ALL':
                theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
            else:
                theEventContent = getattr(self.process, theStreamType+"EventContent")


            addAlCaSelects=False
            if theStreamType=='ALCARECO' and not theFilterName:
                theFilterName='StreamALCACombined'
                addAlCaSelects=True

            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent.clone(),
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(
                                          dataTier = cms.untracked.string(theTier),
                                          filterName = cms.untracked.string(theFilterName))
                                      )
            if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if not theSelectEvent and hasattr(self.process,'filtering_step'):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
            if theSelectEvent:
                output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

            if addAlCaSelects:
                if not hasattr(output,'SelectEvents'):
                    output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                for alca in self.AlCaPaths:
                    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


            if hasattr(self.process,theModuleLabel):
                raise Exception("the current process already has a module "+theModuleLabel+" defined")
            #print "creating output module ",theModuleLabel
            setattr(self.process,theModuleLabel,output)
            outputModule=getattr(self.process,theModuleLabel)
            setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,theModuleLabel+'_step')
            self.schedule.append(path)

            if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
            if theExtraOutputCommands:
                if not isinstance(theExtraOutputCommands,list):
                    raise Exception("extra ouput command in --option must be a list of strings")
                if hasattr(self.process,theStreamType+"EventContent"):
                    self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                else:
                    outputModule.outputCommands.extend(theExtraOutputCommands)

            result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


        # the --output model handles everything; the legacy path below is skipped
        return result

    streamTypes=self._options.eventcontent.split(',')
    tiers=self._options.datatier.split(',')
    if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
        raise Exception("number of event content arguments does not match number of datatier arguments")

    # if the only step is alca we don't need to put in an output
    if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
        return "\n"

    for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
        if streamType=='': continue
        if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
        if streamType=='DQMIO': streamType='DQM'
        eventContent=streamType
        ## the NANOEDM stream types reuse the plain NANOAOD event content
        if streamType == "NANOEDMAOD" :
            eventContent = "NANOAOD"
        elif streamType == "NANOEDMAODSIM" :
            eventContent = "NANOAODSIM"
        theEventContent = getattr(self.process, eventContent+"EventContent")
        if i==0:
            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
        else:
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername
        CppType='PoolOutputModule'
        if self._options.timeoutOutput:
            CppType='TimeoutPoolOutputModule'
        if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
        if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
        output = cms.OutputModule(CppType,
                                  theEventContent,
                                  fileName = cms.untracked.string(theFileName),
                                  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                               filterName = cms.untracked.string(theFilterName)
                                                               )
                                  )
        if hasattr(self.process,"generation_step") and streamType!='LHE':
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
        if hasattr(self.process,"filtering_step"):
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

        if streamType=='ALCARECO':
            output.dataset.filterName = cms.untracked.string('StreamALCACombined')

        if "MINIAOD" in streamType:
            from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
            # NOTE(review): the line applying miniAOD_customizeOutput(output)
            # is missing from this listing.

        outputModuleName=streamType+'output'
        setattr(self.process,outputModuleName,output)
        outputModule=getattr(self.process,outputModuleName)
        setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
        path=getattr(self.process,outputModuleName+'_step')
        self.schedule.append(path)

        if self._options.outputCommands and streamType!='DQM':
            for evct in self._options.outputCommands.split(','):
                if not evct: continue
                self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

        if not self._options.inlineEventContent:
            tmpstreamType=streamType
            if "NANOEDM" in tmpstreamType :
                tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
            def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                return label
            outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

        result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

    return result
764 
    def addStandardSequences(self):
        """
        Add selected standard sequences to the process

        In order: pile-up/mixing configuration, geometry, magnetic field,
        then one prepare_<STEP> call per requested step, and finally the
        random-number-seed restoration requested via --restoreRNDSeeds.
        """
        # load the pile up file
        if self._options.pileup:
            # the option may be 'scenario,{param:value,...}' — only the first token names the scenario
            pileupSpec=self._options.pileup.split(',')[0]

            # Does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # Put mixing parameters in a dictionary
            if '.' in pileupSpec:
                # a dotted name is taken to be a cff module path
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                # a local file, loaded (and inlined) rather than referenced
                mixingDict={'file':pileupSpec[5:]}
            else:
                import copy
                # shallow copy so the popped/updated keys do not alter the shared Mixing table
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                # everything after the first comma is evaluated as a dict of overrides
                # NOTE(review): eval() on a command-line string — trusted-input assumption
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # Load the pu cfg file corresponding to the requested pu scenario
            if 'file:' in pileupSpec:
                #the file is local
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjects+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
                if self._options.pileup_input:
                    # resolve the pile-up input into a file list ('F' key consumed by defineMixing)
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                # defineMixing is expected to consume every key; leftovers are user errors
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())


        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    if self.geometryDBLabel:
                        # guard with hasattr so the commands are harmless when the DB source is absent
                        self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
                        self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))

        except ImportError:
            print("Geometry option",self._options.geometry,"unknown.")
            raise

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        # dispatch every requested step to its prepare_<STEP> method
        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print("Step:", stepName,"Spec:",stepSpec)
            if stepName.startswith('re'):
                # a 're'-prefixed step re-runs on existing input: keep (or drop) its previous products
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                # no explicit spec: use the step's default sequence
                getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                # tuple form: (sequences, cff) -> 'cff,seq1+seq2'
                getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            #it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                # make sure the stored random-engine state is kept on input
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
861 
862  def completeInputCommand(self):
863  if self._options.inputEventContent:
864  import copy
865  def dropSecondDropStar(iec):
866  #drop occurence of 'drop *' in the list
867  count=0
868  for item in iec:
869  if item=='drop *':
870  if count!=0:
871  iec.remove(item)
872  count+=1
873 
874 
875  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
876  for evct in self._options.inputEventContent.split(','):
877  if evct=='': continue
878  theEventContent = getattr(self.process, evct+"EventContent")
879  if hasattr(theEventContent,'outputCommands'):
880  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
881  if hasattr(theEventContent,'inputCommands'):
882  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
883 
884  dropSecondDropStar(self.process.source.inputCommands)
885 
886  if not self._options.dropDescendant:
887  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
888 
889 
890  return
891 
892  def addConditions(self):
893  """Add conditions to the process"""
894  if not self._options.conditions: return
895 
896  if 'FrontierConditions_GlobalTag' in self._options.conditions:
897  print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
898  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
899 
901  from Configuration.AlCa.GlobalTag import GlobalTag
902  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
903  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
904  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
905 
906 
907  def addCustomise(self,unsch=0):
908  """Include the customise code """
909 
910  custOpt=[]
911  if unsch==0:
912  for c in self._options.customisation_file:
913  custOpt.extend(c.split(","))
914  else:
915  for c in self._options.customisation_file_unsch:
916  custOpt.extend(c.split(","))
917 
918  custMap=DictTypes.SortedKeysDict()
919  for opt in custOpt:
920  if opt=='': continue
921  if opt.count('.')>1:
922  raise Exception("more than . in the specification:"+opt)
923  fileName=opt.split('.')[0]
924  if opt.count('.')==0: rest='customise'
925  else:
926  rest=opt.split('.')[1]
927  if rest=='py': rest='customise' #catch the case of --customise file.py
928 
929  if fileName in custMap:
930  custMap[fileName].extend(rest.split('+'))
931  else:
932  custMap[fileName]=rest.split('+')
933 
934  if len(custMap)==0:
935  final_snippet='\n'
936  else:
937  final_snippet='\n# customisation of the process.\n'
938 
939  allFcn=[]
940  for opt in custMap:
941  allFcn.extend(custMap[opt])
942  for fcn in allFcn:
943  if allFcn.count(fcn)!=1:
944  raise Exception("cannot specify twice "+fcn+" as a customisation method")
945 
946  for f in custMap:
947  # let python search for that package and do syntax checking at the same time
948  packageName = f.replace(".py","").replace("/",".")
949  __import__(packageName)
950  package = sys.modules[packageName]
951 
952  # now ask the package for its definition and pick .py instead of .pyc
953  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
954 
955  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
956  if self._options.inline_custom:
957  for line in file(customiseFile,'r'):
958  if "import FWCore.ParameterSet.Config" in line:
959  continue
960  final_snippet += line
961  else:
962  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
963  for fcn in custMap[f]:
964  print("customising the process with",fcn,"from",f)
965  if not hasattr(package,fcn):
966  #bound to fail at run time
967  raise Exception("config "+f+" has no function "+fcn)
968  #execute the command
969  self.process=getattr(package,fcn)(self.process)
970  #and print it in the configuration
971  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
972  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
973 
974  if len(custMap)!=0:
975  final_snippet += '\n# End of customisation functions\n'
976 
977 
978  return final_snippet
979 
980  def addCustomiseCmdLine(self):
981  final_snippet='\n# Customisation from command line\n'
982  if self._options.customise_commands:
983  import string
984  for com in self._options.customise_commands.split('\\n'):
985  com=com.lstrip()
986  self.executeAndRemember(com)
987  final_snippet +='\n'+com
988 
989  return final_snippet
990 
991  #----------------------------------------------------------------------------
992  # here the methods to define the python includes for each step or
993  # conditions
994  #----------------------------------------------------------------------------
    def define_Configs(self):
        """Define the default cff file and default sequence for every step,
        then specialise them for data/MC, the scenario (cosmics, HeavyIons,
        nocoll), fastsim, geometry, magnetic field and pile-up.

        Only attributes are set here; the cffs are loaded later by the
        individual prepare_<STEP> methods.
        """
        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print('Invalid particle table provided. Options are:')
            print(defaultOptions.particleTable)
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

        # per-step default cff files (data-taking flavour; MC overrides further down)
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
        self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
        self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
        self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
        self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
        if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
        self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
        self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
        self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
        self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
        self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        # data mixing (premixing) replaces the plain digitisation chain
        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
            self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        # per-step default sequences (None: no default sequence for that step)
        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            # RECO without RAW2DIGI in the same job: start from RECO-level input
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.RECOSIMDefaultSeq='recosim'
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'
        self.PATGENDefaultSeq='miniGEN'
        #TODO: Check based of file input
        self.NANOGENDefaultSeq='nanogenSequence'
        self.NANODefaultSeq='nanoSequence'
        self.NANODefaultCustom='nanoAOD_customizeCommon'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        # NOTE(review): VtxSmearedDefaultKey is presumably imported at module
        # scope (not visible in this chunk) — confirm against the file header.
        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # if its MC then change the raw2digi
        if self._options.isMC==True:
            self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq='nanoSequenceMC'
        else:
            # no beamspot smearing on real data
            self._options.beamspot = None

        #patch for gen, due to backward incompatibility
        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            # NOTE(review): VtxSmearedHIDefaultKey also expected from module scope
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        # the magnetic field
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        simGeometry=''
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                # resolve a short geometry key through the GeometryConf lookup table
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                #may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                # geometry taken from the conditions database, labelled after 'DB:'
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
                print("with DB:")
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # synchronize the geometry configuration and the FullSimulation sequence to be used
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cff files and sequences
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.NANODefaultSeq = 'nanoSequenceFS'
            self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"

        # Mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey


        #not driven by a default cff anymore
        if self._options.isData:
            self._options.pileup=None
1206 
1209  # for alca, skims, etc
1210  def addExtraStream(self, name, stream, workflow='full'):
1211  # define output module and go from there
1212  output = cms.OutputModule("PoolOutputModule")
1213  if stream.selectEvents.parameters_().__len__()!=0:
1214  output.SelectEvents = stream.selectEvents
1215  else:
1216  output.SelectEvents = cms.untracked.PSet()
1217  output.SelectEvents.SelectEvents=cms.vstring()
1218  if isinstance(stream.paths,tuple):
1219  for path in stream.paths:
1220  output.SelectEvents.SelectEvents.append(path.label())
1221  else:
1222  output.SelectEvents.SelectEvents.append(stream.paths.label())
1223 
1224 
1225 
1226  if isinstance(stream.content,str):
1227  evtPset=getattr(self.process,stream.content)
1228  for p in evtPset.parameters_():
1229  setattr(output,p,getattr(evtPset,p))
1230  if not self._options.inlineEventContent:
1231  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1232  return label
1233  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1234  else:
1235  output.outputCommands = stream.content
1236 
1237 
1238  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1239 
1240  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1241  filterName = cms.untracked.string(stream.name))
1242 
1243  if self._options.filtername:
1244  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1245 
1246  #add an automatic flushing to limit memory consumption
1247  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1248 
1249  if workflow in ("producers,full"):
1250  if isinstance(stream.paths,tuple):
1251  for path in stream.paths:
1252  self.schedule.append(path)
1253  else:
1254  self.schedule.append(stream.paths)
1255 
1256 
1257  # in case of relvals we don't want to have additional outputs
1258  if (not self._options.relval) and workflow in ("full","output"):
1259  self.additionalOutputs[name] = output
1260  setattr(self.process,name,output)
1261 
1262  if workflow == 'output':
1263  # adjust the select events to the proper trigger results from previous process
1264  filterList = output.SelectEvents.SelectEvents
1265  for i, filter in enumerate(filterList):
1266  filterList[i] = filter+":"+self._options.triggerResultsProcess
1267 
1268  return output
1269 
1270  #----------------------------------------------------------------------------
1271  # here the methods to create the steps. Of course we are doing magic here ;)
1272  # prepare_STEPNAME modifies self.process and what else's needed.
1273  #----------------------------------------------------------------------------
1274 
1275  def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
1276  _dotsplit = stepSpec.split('.')
1277  if ( len(_dotsplit)==1 ):
1278  if '/' in _dotsplit[0]:
1279  _sequence = defaultSEQ if defaultSEQ else stepSpec
1280  _cff = _dotsplit[0]
1281  else:
1282  _sequence = stepSpec
1283  _cff = defaultCFF
1284  elif ( len(_dotsplit)==2 ):
1285  _cff,_sequence = _dotsplit
1286  else:
1287  print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1288  print(stepSpec,"not recognized")
1289  raise
1290  l=self.loadAndRemember(_cff)
1291  return l,_sequence,_cff
1292 
1293  def scheduleSequence(self,seq,prefix,what='Path'):
1294  if '*' in seq:
1295  #create only one path with all sequences in it
1296  for i,s in enumerate(seq.split('*')):
1297  if i==0:
1298  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1299  else:
1300  p=getattr(self.process,prefix)
1301  tmp = getattr(self.process, s)
1302  if isinstance(tmp, cms.Task):
1303  p.associate(tmp)
1304  else:
1305  p+=tmp
1306  self.schedule.append(getattr(self.process,prefix))
1307  return
1308  else:
1309  #create as many path as many sequences
1310  if not '+' in seq:
1311  if self.nextScheduleIsConditional:
1312  self.conditionalPaths.append(prefix)
1313  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1314  self.schedule.append(getattr(self.process,prefix))
1315  else:
1316  for i,s in enumerate(seq.split('+')):
1317  sn=prefix+'%d'%(i)
1318  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1319  self.schedule.append(getattr(self.process,sn))
1320  return
1321 
1322  def scheduleSequenceAtEnd(self,seq,prefix):
1323  self.scheduleSequence(seq,prefix,what='EndPath')
1324  return
1325 
1326  def prepare_ALCAPRODUCER(self, stepSpec = None):
1327  self.prepare_ALCA(stepSpec, workflow = "producers")
1328 
1329  def prepare_ALCAOUTPUT(self, stepSpec = None):
1330  self.prepare_ALCA(stepSpec, workflow = "output")
1331 
    def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
        """ Enrich the process with alca streams

        Splits the spec on '+', expands @-shortcuts via autoAlca, then for
        every matching ALCARECOStream* in the ALCA cff creates an extra output
        stream (addExtraStream). Raises if any requested alca is unknown.
        """
        alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

        MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        for alca in alcaList:
            if (len(alca)>MAXLEN):
                raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

        maxLevel=0  # NOTE(review): not used below in this chunk
        from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                # some alcas cannot run with concurrent lumi blocks
                if shortName in AlCaNoConcurrentLumis:
                    print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                    self._options.nConcurrentLumis = 1
                    self._options.nConcurrentIOVs = 1
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # consume every occurrence of this alca from the request list
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)


        # anything left in alcaList was not matched by the cff: report and fail
        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print("The following alcas could not be found "+str(alcaList))
            print("available ",available)
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1397 
1398  def prepare_LHE(self, stepSpec = None):
1399  #load the fragment
1400 
1401  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1402  print("Loading lhe fragment from",loadFragment)
1403  __import__(loadFragment)
1404  self.process.load(loadFragment)
1405 
1406  self._options.inlineObjects+=','+stepSpec
1407 
1408  getattr(self.process,stepSpec).nEvents = self._options.number
1409 
1410  #schedule it
1411  self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1412  self.excludedPaths.append("lhe_step")
1413  self.schedule.append( self.process.lhe_step )
1414 
    def prepare_GEN(self, stepSpec = None):
        """ load the fragment of generator configuration

        Imports the --evt_type fragment (if any), exposes its modules for
        inlining, sets up vertex smearing and (for HeavyIons) the generator
        mixing, then schedules the generation step and the gen-filter summary.
        """
        loadFailure=False
        #remove trailing .py
        #support old style .cfi by changing into something.cfi into something_cfi
        #remove python/ from the name
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
        #standard location of fragments
        if not '/' in loadFragment:
            loadFragment='Configuration.Generator.'+loadFragment
        else:
            loadFragment=loadFragment.replace('/','.')
        try:
            print("Loading generator fragment from",loadFragment)
            __import__(loadFragment)
        except:
            # NOTE(review): bare except — any import error is treated as
            # "no fragment"; acceptable only when input files are provided
            loadFailure=True
            #if self.process.source and self.process.source.type_()=='EmptySource':
            if not (self._options.filein or self._options.dasquery):
                raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

        if not loadFailure:
            from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators

            generatorModule=sys.modules[loadFragment]
            genModules=generatorModule.__dict__
            #remove lhe producer module since this should have been
            #imported instead in the LHE step
            if self.LHEDefaultSeq in genModules:
                del genModules[self.LHEDefaultSeq]

            if self._options.hideGen:
                self.loadAndRemember(loadFragment)
            else:
                self.process.load(loadFragment)
                # expose the objects from that fragment to the configuration
                import FWCore.ParameterSet.Modules as cmstypes
                for name in genModules:
                    theObject = getattr(generatorModule,name)
                    if isinstance(theObject, cmstypes._Module):
                        self._options.inlineObjects=name+','+self._options.inlineObjects
                        # some generators cannot run with concurrent lumi blocks
                        if theObject.type_() in noConcurrentLumiGenerators:
                            print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
                            self._options.nConcurrentLumis = 1
                            self._options.nConcurrentIOVs = 1
                    elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                        self._options.inlineObjects+=','+name

            # remember the production filter sequence for later path insertion
            if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly' or stepSpec == 'pgen_smear':
                if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                    self.productionFilterSequence = 'ProductionFilterSequence'
                elif 'generator' in genModules:
                    self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)

        if True:
            try:
                from Configuration.StandardSequences.VtxSmeared import VtxSmeared
                cffToBeLoaded=VtxSmeared[self._options.beamspot]
                self.loadAndRemember(cffToBeLoaded)
            except ImportError:
                raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

            if self._options.scenario == 'HeavyIons':
                if self._options.pileup=='HiMixGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
                elif self._options.pileup=='HiMixEmbGEN':
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
                else:
                    self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
        self.schedule.append(self.process.generation_step)

        #register to the genstepfilter the name of the path (static right now, but might evolve)
        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:
            #stop here
            return

        """ Enrich the schedule with the summary of the filter step """
        #the gen filter in the endpath
        self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
        self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
        return
1503 
1504  def prepare_SIM(self, stepSpec = None):
1505  """ Enrich the schedule with the simulation step"""
1506  _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
1507  if not self._options.fast:
1508  if self._options.gflash==True:
1509  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1510 
1511  if self._options.magField=='0T':
1512  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1513  else:
1514  if self._options.magField=='0T':
1515  self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1516 
1517  self.scheduleSequence(_simSeq,'simulation_step')
1518  return
1519 
1520  def prepare_DIGI(self, stepSpec = None):
1521  """ Enrich the schedule with the digitisation step"""
1522  _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)
1523 
1524  if self._options.gflash==True:
1525  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1526 
1527  if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
1528  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1529 
1530  if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1531  if self._options.inputEventContent=='':
1532  self._options.inputEventContent='REGEN'
1533  else:
1534  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1535 
1536 
1537  self.scheduleSequence(_digiSeq,'digitisation_step')
1538  return
1539 
1540  def prepare_CFWRITER(self, stepSpec = None):
1541  """ Enrich the schedule with the crossing frame writer step"""
1543  self.scheduleSequence('pcfw','cfwriter_step')
1544  return
1545 
1546  def prepare_DATAMIX(self, stepSpec = None):
1547  """ Enrich the schedule with the digitisation step"""
1549  self.scheduleSequence('pdatamix','datamixing_step')
1550 
1551  if self._options.pileup_input:
1552  theFiles=''
1553  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1554  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1555  elif self._options.pileup_input.startswith("filelist:"):
1556  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1557  else:
1558  theFiles=self._options.pileup_input.split(',')
1559  #print theFiles
1560  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1561 
1562  return
1563 
1564  def prepare_DIGI2RAW(self, stepSpec = None):
1565  _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1566  self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1567  return
1568 
1569  def prepare_REPACK(self, stepSpec = None):
1570  _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1571  self.scheduleSequence(_repackSeq,'digi2repack_step')
1572  return
1573 
1574  def prepare_L1(self, stepSpec = None):
1575  """ Enrich the schedule with the L1 simulation step"""
1576  assert(stepSpec == None)
1577  self.loadAndRemember(self.L1EMDefaultCFF)
1578  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1579  return
1580 
1581  def prepare_L1REPACK(self, stepSpec = None):
1582  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1583  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1584  if stepSpec in supported:
1585  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1586  if self._options.scenario == 'HeavyIons':
1587  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1588  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1589  else:
1590  print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1591  raise Exception('unsupported feature')
1592 
    def prepare_HLT(self, stepSpec = None):
        """ Enrich the schedule with the HLT simulation step"""
        # an explicit menu specification is mandatory for the HLT step
        if not stepSpec:
            print("no specification of the hlt menu has been given, should never happen")
            raise Exception('no HLT specifications provided')

        if '@' in stepSpec:
            # case where HLT:@something was provided
            from Configuration.HLT.autoHLT import autoHLT
            key = stepSpec[1:]
            if key in autoHLT:
                stepSpec = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in stepSpec:
            #case where HLT:something:something was provided
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            # menu flavour depends on the scenario: HIon for heavy ions, GRun otherwise
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the option dict as keyword arguments for loadHltConfiguration
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
            if stepSpec == 'run,fromSource':
                # take the run number from whichever attribute the source provides
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception(f'Cannot replace menu to load {stepSpec}')
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the corresponding HLT cff directly
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            # a non-HLT process name requires renaming the process in the dumped config too
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        if self.process.schedule == None:
            raise Exception('the HLT step did not attach a valid schedule to the process')

        # remember where the HLT paths start and keep them out of the dumped cfg
        self.scheduleIndexOfFirstHLTPath = len(self.schedule)
        [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

        # this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1649 
1650 
1651  def prepare_RAW2RECO(self, stepSpec = None):
1652  if ','in stepSpec:
1653  seqReco,seqDigi=stepSpec.spli(',')
1654  else:
1655  print(f"RAW2RECO requires two specifications {stepSpec} insufficient")
1656 
1657  self.prepare_RAW2DIGI(seqDigi)
1658  self.prepare_RECO(seqReco)
1659  return
1660 
1661  def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1662  _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1663  self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1664  return
1665 
1666  def prepare_PATFILTER(self, stepSpec = None):
1667  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1668  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1669  for filt in allMetFilterPaths:
1670  self.schedule.append(getattr(self.process,'Flag_'+filt))
1671 
1672  def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
1673  ''' Enrich the schedule with L1 HW validation '''
1674  self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
1675  print('\n\n\n DEPRECATED this has no action \n\n\n')
1676  return
1677 
1678  def prepare_L1Reco(self, stepSpec = "L1Reco"):
1679  ''' Enrich the schedule with L1 reconstruction '''
1680  _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1681  self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1682  return
1683 
1684  def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1685  ''' Enrich the schedule with L1 reconstruction '''
1686  _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1687  self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1688  return
1689 
    def prepare_FILTER(self, stepSpec = None):
        ''' Enrich the schedule with a user defined filter sequence '''

        # stepSpec has the form 'ConfigFile.sequenceName'
        filterConfig,filterSeq = stepSpec.split('.')
        filterConfig=self.load(filterConfig)

        # visitor collecting the labels of all modules in the filter sequence
        class PrintAllModules(object):
            def __init__(self):
                self.inliner=''
                pass
            def enter(self,visitee):
                try:
                    label=visitee.label()

                    self.inliner=label+','+self.inliner
                except:
                    # unlabeled visitees are simply skipped (intentional best-effort)
                    pass
            def leave(self,v): pass

        # inline all modules of the filter sequence (and the sequence itself)
        # into the dumped configuration
        expander=PrintAllModules()
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjects+=','+expander.inliner
        self._options.inlineObjects+=','+filterSeq


        # schedule it and make the subsequent paths depend on the filter decision
        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True

        self.productionFilterSequence = filterSeq

        return
1721 
1722  def prepare_RECO(self, stepSpec = "reconstruction"):
1723  ''' Enrich the schedule with reconstruction '''
1724  _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1725  self.scheduleSequence(_recoSeq,'reconstruction_step')
1726  return
1727 
1728  def prepare_RECOSIM(self, stepSpec = "recosim"):
1729  ''' Enrich the schedule with reconstruction '''
1730  _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1731  self.scheduleSequence(_recosimSeq,'recosim_step')
1732  return
1733 
    def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        # NOTE(review): stepSpec is accepted but never used below — the default
        # sequence is always loaded; confirm whether that is intended
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
            sys.exit()
        _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
        self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
        return
1742 
1743  def prepare_PAT(self, stepSpec = "miniAOD"):
1744  ''' Enrich the schedule with PAT '''
1745  self.prepare_PATFILTER(self)
1746  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1747  self.labelsToAssociate.append('patTask')
1748  if self._options.isData:
1749  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1750  else:
1751  if self._options.fast:
1752  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1753  else:
1754  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1755 
1756  if self._options.hltProcess:
1757  if len(self._options.customise_commands) > 1:
1758  self._options.customise_commands = self._options.customise_commands + " \n"
1759  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1760  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1761  self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1762 
1763 # self.renameHLTprocessInSequence(sequence)
1764 
1765  return
1766 
1767  def prepare_PATGEN(self, stepSpec = "miniGEN"):
1768  ''' Enrich the schedule with PATGEN '''
1769  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
1770  self.labelsToAssociate.append('patGENTask')
1771  if self._options.isData:
1772  raise Exception("PATGEN step can only run on MC")
1773  return
1774 
1775  def prepare_NANO(self, stepSpec = '' ):
1776  print(f"in prepare_nano {stepSpec}")
1777  ''' Enrich the schedule with NANO '''
1778  _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1779 
1780  # create full specified sequence using autoNANO
1781  from PhysicsTools.NanoAOD.autoNANO import autoNANO, expandNanoMapping
1782  # if not a autoNANO mapping, load an empty customization, which later will be converted into the default.
1783  _nanoCustoms = _nanoSeq.split('+') if '@' in stepSpec else ['']
1784  _nanoSeq = _nanoSeq.split('+')
1785  expandNanoMapping(_nanoSeq, autoNANO, 'sequence')
1786  expandNanoMapping(_nanoCustoms, autoNANO, 'customize')
1787  # make sure there are no duplicates while preserving the ordering
1788  _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1789  _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1790  # replace empty sequence with default
1791  _nanoSeq = [seq if seq!='' else self.NANODefaultSeq for seq in _nanoSeq]
1792  _nanoCustoms = [cust if cust!='' else self.NANODefaultCustom for cust in _nanoCustoms]
1793  # build and inject the sequence
1794  if len(_nanoSeq) < 1 and '@' in stepSpec:
1795  raise Exception(f'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mappign')
1796  self.scheduleSequence('+'.join(_nanoSeq), 'nanoAOD_step')
1797 
1798  # add the customisations
1799  for custom in _nanoCustoms:
1800  custom_path = custom if '.' in custom else '.'.join([_nanoCff,custom])
1801  # customization order can be important for NANO, here later specified customise take precedence
1802  self._options.customisation_file.append(custom_path)
1803  if self._options.hltProcess:
1804  if len(self._options.customise_commands) > 1:
1805  self._options.customise_commands = self._options.customise_commands + " \n"
1806  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1807 
1808  def prepare_NANOGEN(self, stepSpec = "nanoAOD"):
1809  ''' Enrich the schedule with NANOGEN '''
1810  # TODO: Need to modify this based on the input file type
1811  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1812  _,_nanogenSeq,_nanogenCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANOGENDefaultCFF)
1813  self.scheduleSequence(_nanogenSeq,'nanoAOD_step')
1814  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1815  if self._options.runUnscheduled:
1816  self._options.customisation_file_unsch.insert(0, '.'.join([_nanogenCff, custom]))
1817  else:
1818  self._options.customisation_file.insert(0, '.'.join([_nanogenCff, custom]))
1819 
    def prepare_SKIM(self, stepSpec = "all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)

        # decide whether skim sequences must be retargeted to a re-run HLT process
        stdHLTProcName = 'HLT'
        newHLTProcName = self._options.hltProcess
        customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
        if customiseForReHLT:
            print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))


        # expand '@'-aliases into the concrete skim names
        from Configuration.Skimming.autoSkim import autoSkim
        skimlist = sequence.split('+')
        self.expandMapping(skimlist,autoSkim)

        #print("dictionary for skims:", skimConfig.__dict__)
        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig, skim)

            # blacklist AlCa paths so that they do not appear in the cfg
            if isinstance(skimstream, cms.Path):
                self.blacklist_paths.append(skimstream)
            # if enabled, apply "hltProcess" renaming to Sequences
            elif isinstance(skimstream, cms.Sequence):
                if customiseForReHLT:
                    self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)

            # only FilteredStream objects define actual output streams
            if not isinstance(skimstream, cms.FilteredStream):
                continue

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                #add a DQM eventcontent for this guy
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                # drop every occurrence of the handled skim from the request list
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        # anything left in skimlist was requested but never matched a stream
        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1873 
1874 
1875  def prepare_USER(self, stepSpec = None):
1876  ''' Enrich the schedule with a user defined sequence '''
1877  _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1878  self.scheduleSequence(_userSeq,'user_step')
1879  return
1880 
1881  def prepare_POSTRECO(self, stepSpec = None):
1882  """ Enrich the schedule with the postreco step """
1884  self.scheduleSequence('postreco_generator','postreco_step')
1885  return
1886 
1887 
1888  def prepare_VALIDATION(self, stepSpec = 'validation'):
1889  print(f"{stepSpec} in preparing validation")
1890  _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1891  from Validation.Configuration.autoValidation import autoValidation
1892  #in case VALIDATION:something:somethingelse -> something,somethingelse
1893  if sequence.find(',')!=-1:
1894  prevalSeqName=sequence.split(',')[0].split('+')
1895  valSeqName=sequence.split(',')[1].split('+')
1896  self.expandMapping(prevalSeqName,autoValidation,index=0)
1897  self.expandMapping(valSeqName,autoValidation,index=1)
1898  else:
1899  if '@' in sequence:
1900  prevalSeqName=sequence.split('+')
1901  valSeqName=sequence.split('+')
1902  self.expandMapping(prevalSeqName,autoValidation,index=0)
1903  self.expandMapping(valSeqName,autoValidation,index=1)
1904  else:
1905  postfix=''
1906  if sequence:
1907  postfix='_'+sequence
1908  prevalSeqName=['prevalidation'+postfix]
1909  valSeqName=['validation'+postfix]
1910  if not hasattr(self.process,valSeqName[0]):
1911  prevalSeqName=['']
1912  valSeqName=[sequence]
1913 
1914  def NFI(index):
1915 
1916  if index==0:
1917  return ''
1918  else:
1919  return '%s'%index
1920 
1921 
1922  #rename the HLT process in validation steps
1923  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1924  for s in valSeqName+prevalSeqName:
1925  if s:
1927  for (i,s) in enumerate(prevalSeqName):
1928  if s:
1929  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1930  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1931 
1932  for (i,s) in enumerate(valSeqName):
1933  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1934  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1935 
1936  #needed in case the miniAODValidation sequence is run starting from AODSIM
1937  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1938  return
1939 
1940  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1941  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1942  self._options.restoreRNDSeeds=True
1943 
1944  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1945  self.executeAndRemember("process.mix.playback = True")
1946  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1947  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1948  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1949 
1950  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1951  #will get in the schedule, smoothly
1952  for (i,s) in enumerate(valSeqName):
1953  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1954 
1955  return
1956 
1957 
1959  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1960  It will climb down within PSets, VPSets and VInputTags to find its target"""
1961  def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1962  self._paramReplace = paramReplace
1963  self._paramSearch = paramSearch
1964  self._verbose = verbose
1965  self._whitelist = whitelist
1967  def doIt(self, pset, base):
1968  if isinstance(pset, cms._Parameterizable):
1969  for name in pset.parameters_().keys():
1970  # skip whitelisted parameters
1971  if name in self._whitelist:
1972  continue
1973  # if I use pset.parameters_().items() I get copies of the parameter values
1974  # so I can't modify the nested pset
1975  value = getattr(pset, name)
1976  valueType = type(value)
1977  if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
1978  self.doIt(value,base+"."+name)
1979  elif valueType in [cms.VPSet, cms.untracked.VPSet]:
1980  for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
1981  elif valueType in [cms.string, cms.untracked.string]:
1982  if value.value() == self._paramSearch:
1983  if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
1984  setattr(pset, name,self._paramReplace)
1985  elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
1986  for (i,n) in enumerate(value):
1987  if not isinstance(n, cms.InputTag):
1988  n=cms.InputTag(n)
1989  if n.processName == self._paramSearch:
1990  # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
1991  if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
1992  setattr(n,"processName",self._paramReplace)
1993  value[i]=n
1994  elif valueType in [cms.vstring, cms.untracked.vstring]:
1995  for (i,n) in enumerate(value):
1996  if n==self._paramSearch:
1997  getattr(pset,name)[i]=self._paramReplace
1998  elif valueType in [cms.InputTag, cms.untracked.InputTag]:
1999  if value.processName == self._paramSearch:
2000  if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
2001  setattr(getattr(pset, name),"processName",self._paramReplace)
2002 
2003  def enter(self,visitee):
2004  label = ''
2005  try:
2006  label = visitee.label()
2007  except AttributeError:
2008  label = '<Module not in a Process>'
2009  except:
2010  label = 'other execption'
2011  self.doIt(visitee, label)
2012 
2013  def leave(self,visitee):
2014  pass
2015 
2016  #visit a sequence to repalce all input tags
2017  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
2018  print("Replacing all InputTag %s => %s"%(oldT,newT))
2019  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
2020  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
2021  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
2022  if not loadMe in self.additionalCommands:
2023  self.additionalCommands.append(loadMe)
2024  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2025 
2026  #change the process name used to address HLT results in any sequence
2027  def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2028  if proc == None:
2029  proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2030  if proc == HLTprocess:
2031  return
2032  # look up all module in sequence
2033  if verbosityLevel > 0:
2034  print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2035  verboseVisit = (verbosityLevel > 1)
2036  getattr(self.process,sequence).visit(
2037  ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2038  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2039  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2041  'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2042  % (sequence, HLTprocess, proc, verboseVisit))
2043 
2044  def expandMapping(self,seqList,mapping,index=None):
2045  maxLevel=30
2046  level=0
2047  while '@' in repr(seqList) and level<maxLevel:
2048  level+=1
2049  for specifiedCommand in seqList:
2050  if specifiedCommand.startswith('@'):
2051  location=specifiedCommand[1:]
2052  if not location in mapping:
2053  raise Exception("Impossible to map "+location+" from "+repr(mapping))
2054  mappedTo=mapping[location]
2055  if index!=None:
2056  mappedTo=mappedTo[index]
2057  seqList.remove(specifiedCommand)
2058  seqList.extend(mappedTo.split('+'))
2059  break;
2060  if level==maxLevel:
2061  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2062 
    def prepare_DQM(self, stepSpec = 'DQMOffline'):
        """Enrich the schedule with the offline DQM sequences (and their
        post-PAT counterparts) resolved from the autoDQM mapping."""
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
        # index 0 of the autoDQM entries is the main sequence, index 1 its post-PAT partner
        sequenceList=_dqmSeq.split('+')
        postSequenceList=_dqmSeq.split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        # deduplicate while keeping the original ordering
        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(OrderedSet(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,_sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(_sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,_sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2105 
2106  def prepare_HARVESTING(self, stepSpec = None):
2107  """ Enrich the process with harvesting step """
2108  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2110 
2111  harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2112 
2113  # decide which HARVESTING paths to use
2114  harvestingList = sequence.split("+")
2115  from DQMOffline.Configuration.autoDQM import autoDQM
2116  from Validation.Configuration.autoValidation import autoValidation
2117  import copy
2118  combined_mapping = copy.deepcopy( autoDQM )
2119  combined_mapping.update( autoValidation )
2120  self.expandMapping(harvestingList,combined_mapping,index=-1)
2121 
2122  if len(set(harvestingList))!=len(harvestingList):
2123  harvestingList=list(OrderedSet(harvestingList))
2124  print("Duplicate entries for HARVESTING, using",harvestingList)
2125 
2126  for name in harvestingList:
2127  if not name in harvestingConfig.__dict__:
2128  print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2129  # trigger hard error, like for other sequence types
2130  getattr(self.process, name)
2131  continue
2132  harvestingstream = getattr(harvestingConfig,name)
2133  if isinstance(harvestingstream,cms.Path):
2134  self.schedule.append(harvestingstream)
2135  self.blacklist_paths.append(harvestingstream)
2136  if isinstance(harvestingstream,cms.Sequence):
2137  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2138  self.schedule.append(getattr(self.process,name+"_step"))
2139 
2140  # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
2141  # if self._options.hltProcess or ('HLT' in self.stepMap):
2142  # pass
2143 
2144  self.scheduleSequence('DQMSaver','dqmsave_step')
2145  return
2146 
    def prepare_ALCAHARVEST(self, stepSpec = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        sequence=stepSpec.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        # expand '@'-aliases via the PCL mapping
        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # hook up the matching DB output and metadata records;
                # extend for VPSets, append for single PSets
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over was requested but does not exist in the configuration
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2179 
2180 
2181 
2182  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2183  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2184  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2185  return
2186 
2187  def finalizeFastSimHLT(self):
2188  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2189  self.schedule.append(self.process.reconstruction)
2190 
2191 
2192  def build_production_info(self, evt_type, evtnumber):
2193  """ Add useful info for the production. """
2194  self.process.configurationMetadata=cms.untracked.PSet\
2195  (version=cms.untracked.string("$Revision: 1.19 $"),
2196  name=cms.untracked.string("Applications"),
2197  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2198  )
2199 
2200  self.addedObjects.append(("Production Info","configurationMetadata"))
2201 
2202 
2203  def create_process(self):
2204  self.pythonCfgCode = "# Auto generated configuration file\n"
2205  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2206  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2207  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2208 
2209  # now set up the modifies
2210  modifiers=[]
2211  modifierStrings=[]
2212  modifierImports=[]
2213 
2214  if hasattr(self._options,"era") and self._options.era :
2215  # Multiple eras can be specified in a comma seperated list
2216  from Configuration.StandardSequences.Eras import eras
2217  for requestedEra in self._options.era.split(",") :
2218  modifierStrings.append(requestedEra)
2219  modifierImports.append(eras.pythonCfgLines[requestedEra])
2220  modifiers.append(getattr(eras,requestedEra))
2221 
2222 
2223  if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2224  import importlib
2225  thingsImported=[]
2226  for c in self._options.procModifiers:
2227  thingsImported.extend(c.split(","))
2228  for pm in thingsImported:
2229  modifierStrings.append(pm)
2230  modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2231  modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2232 
2233  self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2234  self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop
2235 
2236 
2237  if len(modifierStrings)>0:
2238  self.pythonCfgCode+= ','+','.join(modifierStrings)
2239  self.pythonCfgCode+=')\n\n'
2240 
2241  #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
2242  #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
2243  if self.process == None:
2244  if len(modifiers)>0:
2245  self.process = cms.Process(self._options.name,*modifiers)
2246  else:
2247  self.process = cms.Process(self._options.name)
2248 
2249 
2250 
2251 
2252  def prepare(self, doChecking = False):
2253  """ Prepare the configuration string and add missing pieces."""
2254 
2255  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2256  self.addMaxEvents()
2257  if self.with_input:
2258  self.addSource()
2259  self.addStandardSequences()
2260 
2261  self.completeInputCommand()
2262  self.addConditions()
2263 
2264 
2265  outputModuleCfgCode=""
2266  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2267  outputModuleCfgCode=self.addOutput()
2268 
2269  self.addCommon()
2270 
2271  self.pythonCfgCode += "# import of standard configurations\n"
2272  for module in self.imports:
2273  self.pythonCfgCode += ("process.load('"+module+"')\n")
2274 
2275  # production info
2276  if not hasattr(self.process,"configurationMetadata"):
2277  self.build_production_info(self._options.evt_type, self._options.number)
2278  else:
2279  #the PSet was added via a load
2280  self.addedObjects.append(("Production Info","configurationMetadata"))
2281 
2282  self.pythonCfgCode +="\n"
2283  for comment,object in self.addedObjects:
2284  if comment!="":
2285  self.pythonCfgCode += "\n# "+comment+"\n"
2286  self.pythonCfgCode += dumpPython(self.process,object)
2287 
2288  # dump the output definition
2289  self.pythonCfgCode += "\n# Output definition\n"
2290  self.pythonCfgCode += outputModuleCfgCode
2291 
2292  # dump all additional outputs (e.g. alca or skim streams)
2293  self.pythonCfgCode += "\n# Additional output definition\n"
2294  #I do not understand why the keys are not normally ordered.
2295  nl=sorted(self.additionalOutputs.keys())
2296  for name in nl:
2297  output = self.additionalOutputs[name]
2298  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2299  tmpOut = cms.EndPath(output)
2300  setattr(self.process,name+'OutPath',tmpOut)
2301  self.schedule.append(tmpOut)
2302 
2303  # dump all additional commands
2304  self.pythonCfgCode += "\n# Other statements\n"
2305  for command in self.additionalCommands:
2306  self.pythonCfgCode += command + "\n"
2307 
2308  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2309  for object in self._options.inlineObjects.split(','):
2310  if not object:
2311  continue
2312  if not hasattr(self.process,object):
2313  print('cannot inline -'+object+'- : not known')
2314  else:
2315  self.pythonCfgCode +='\n'
2316  self.pythonCfgCode +=dumpPython(self.process,object)
2317 
2318  if self._options.pileup=='HiMixEmbGEN':
2319  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2320 
2321  # dump all paths
2322  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2323  for path in self.process.paths:
2324  if getattr(self.process,path) not in self.blacklist_paths:
2325  self.pythonCfgCode += dumpPython(self.process,path)
2326 
2327  for endpath in self.process.endpaths:
2328  if getattr(self.process,endpath) not in self.blacklist_paths:
2329  self.pythonCfgCode += dumpPython(self.process,endpath)
2330 
2331  # dump the schedule
2332  self.pythonCfgCode += "\n# Schedule definition\n"
2333 
2334  # handling of the schedule
2335  pathNames = ['process.'+p.label_() for p in self.schedule]
2336  if self.process.schedule == None:
2337  self.process.schedule = cms.Schedule()
2338  for item in self.schedule:
2339  self.process.schedule.append(item)
2340  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2341  else:
2342  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2343  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2344 
2345  for index, item in enumerate(self.schedule):
2346  if index < self.scheduleIndexOfFirstHLTPath:
2347  self.process.schedule.insert(index, item)
2348  else:
2349  self.process.schedule.append(item)
2350 
2351  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2352  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2353  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2354  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2355  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2356 
2357  self.pythonCfgCode += result
2358 
2359  for labelToAssociate in self.labelsToAssociate:
2360  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2361  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2362 
2363  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2365  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2366  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2367 
2368  overrideThreads = (self._options.nThreads != 1)
2369  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2370  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2371 
2372  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2373  self.pythonCfgCode +="\n"
2374  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2375  if overrideThreads:
2376  self.pythonCfgCode +="process.options.numberOfThreads = {}\n".format(self._options.nThreads)
2377  self.pythonCfgCode +="process.options.numberOfStreams = {}\n".format(self._options.nStreams)
2378  self.process.options.numberOfThreads = self._options.nThreads
2379  self.process.options.numberOfStreams = self._options.nStreams
2380  if overrideConcurrentLumis:
2381  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = {}\n".format(self._options.nConcurrentLumis)
2382  self.process.options.numberOfConcurrentLuminosityBlocks = self._options.nConcurrentLumis
2383  if overrideConcurrentIOVs:
2384  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = {}\n".format(self._options.nConcurrentIOVs)
2385  self.process.options.eventSetup.numberOfConcurrentIOVs = self._options.nConcurrentIOVs
2386 
2387  if self._options.accelerators is not None:
2388  accelerators = self._options.accelerators.split(',')
2389  self.pythonCfgCode += "\n"
2390  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2391  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2392  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2393  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2394  self.process.options.accelerators = accelerators
2395 
2396  #repacked version
2397  if self._options.isRepacked:
2398  self.pythonCfgCode +="\n"
2399  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2400  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2401  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2402 
2403  # special treatment in case of production filter sequence 2/2
2404  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2405  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2406  self.pythonCfgCode +='for path in process.paths:\n'
2407  if len(self.conditionalPaths):
2408  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2409  if len(self.excludedPaths):
2410  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2411  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2412  pfs = getattr(self.process,self.productionFilterSequence)
2413  for path in self.process.paths:
2414  if not path in self.conditionalPaths: continue
2415  if path in self.excludedPaths: continue
2416  getattr(self.process,path).insert(0, pfs)
2417 
2418 
2419  # dump customise fragment
2420  self.pythonCfgCode += self.addCustomise()
2421 
2422  if self._options.runUnscheduled:
2423  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2424  # Keep the "unscheduled customise functions" separate for now,
2425  # there are customize functions given by users (in our unit
2426  # tests) that need to be run before the "unscheduled customise
2427  # functions"
2428  self.pythonCfgCode += self.addCustomise(1)
2429 
2430  self.pythonCfgCode += self.addCustomiseCmdLine()
2431 
2432  if hasattr(self.process,"logErrorHarvester"):
2433  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2434  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2435  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2436  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2437  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2439 
2440  # Temporary hack to put the early delete customization after
2441  # everything else
2442  #
2443  # FIXME: remove when no longer needed
2444  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2445  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2446  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2447  self.pythonCfgCode += "# End adding early deletion\n"
2448  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2449  self.process = customiseEarlyDelete(self.process)
2450 
2451  imports = cms.specialImportRegistry.getSpecialImports()
2452  if len(imports) > 0:
2453  #need to inject this at the top
2454  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2455  #now find the end of line
2456  index = self.pythonCfgCode.find("\n",index)
2457  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2458 
2459 
2460  # make the .io file
2461 
2462  if self._options.io:
2463  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2464  if not self._options.io.endswith('.io'): self._option.io+='.io'
2465  io=open(self._options.io,'w')
2466  ioJson={}
2467  if hasattr(self.process.source,"fileNames"):
2468  if len(self.process.source.fileNames.value()):
2469  ioJson['primary']=self.process.source.fileNames.value()
2470  if hasattr(self.process.source,"secondaryFileNames"):
2471  if len(self.process.source.secondaryFileNames.value()):
2472  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2473  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2474  ioJson['pileup']=self._options.pileup_input[4:]
2475  for (o,om) in self.process.outputModules_().items():
2476  ioJson[o]=om.fileName.value()
2477  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2478  if self.productionFilterSequence:
2479  ioJson['filter']=self.productionFilterSequence
2480  import json
2481  io.write(json.dumps(ioJson))
2482  return
2483 
2484 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
Definition: autoNANO.py:1
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:207
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule