# ConfigBuilder.py
# Builds cmsRun python configurations from cmsDriver-style options
# (CMSSW Configuration/Applications package).
1 #! /usr/bin/env python3
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 # The following import is provided for backward compatibility reasons.
9 # The function used to be defined in this file.
10 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
11 
12 import hashlib
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 from FWCore.ParameterSet.OrderedSet import OrderedSet
class Options:
    """Plain attribute bag used to carry cmsDriver option values around."""
    pass
21 
# the canonical defaults
# One attribute per cmsDriver command-line option; values here are the
# fallbacks used when the option is not given on the command line.
defaultOptions = Options()
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
# pileup / mixing defaults
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# geometry, field and conditions defaults
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# event-count defaults: -1 means "all events"
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# input defaults (plain files, DAS queries, secondary files)
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# customisation hooks
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# output defaults
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjects =''
defaultOptions.hideGen=False
# import kept in its original mid-block position
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.prefix = None
# profiling / debugging defaults
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.maxmem_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# run-dependent MC defaults
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# framework concurrency defaults (0 = let the framework decide)
defaultOptions.nThreads = 1
defaultOptions.nStreams = 0
defaultOptions.nConcurrentLumis = 0
defaultOptions.nConcurrentIOVs = 0
defaultOptions.accelerators = None
96 
97 # some helper routines
def dumpPython(process,name):
    """Return a python-source assignment string for attribute `name` of `process`.

    Sequence-like objects (Path, EndPath, Sequence) are emitted without a
    trailing newline; every other object type gets one.
    """
    theObject = getattr(process,name)
    if isinstance(theObject, (cms.Path, cms.EndPath, cms.Sequence)):
        return "process."+name+" = " + theObject.dumpPython()
    # The original code special-cased _Module/ESProducer here, but its body was
    # byte-identical to the else branch, so the redundant test is dropped.
    return "process."+name+" = " + theObject.dumpPython()+"\n"
def filesFromList(fileName,s=None):
    """Read primary (and optional secondary/parent) ROOT file names from a list file.

    Each line of `fileName` may contain one ".root" name (primary only) or two
    (primary + parent).  Duplicates are removed while preserving order.  When a
    source object `s` is given, its fileNames / secondaryFileNames vstrings are
    created or extended.  Returns the tuple (prim, sec); raises Exception when
    no primary files are found.
    """
    prim=[]
    sec=[]
    # use a context manager so the list file is always closed (the original
    # left the handle open)
    with open(fileName,'r') as listFile:
        for line in listFile:
            if line.count(".root")>=2:
                #two files solution: primary + parent (secondary) file
                entries=line.replace("\n","").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif (line.find(".root")!=-1):
                prim.append(line.replace("\n",""))
    # remove any duplicates but keep the order (dict preserves insertion order)
    prim = list(dict.fromkeys(prim))
    sec = list(dict.fromkeys(sec))
    if s:
        # deferred import: cms is only needed when filling a source object
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
141 
def filesFromDASQuery(query,option="",s=None):
    """Resolve a DAS query to (primary, secondary) file lists via dasgoclient.

    Retries the client up to 3 times (sleeping 100 s between attempts).  When a
    source object `s` is given, its fileNames / secondaryFileNames vstrings are
    created or extended.  Returns the tuple (prim, sec), each sorted and
    de-duplicated.
    """
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    # retry loop: eC is the child exit status, nonzero until dasgoclient succeeds
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        # NOTE(review): shell=True with an interpolated query string — `query`
        # is operator-provided here, but this is shell-injectable if that changes
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    # parse whatever the last attempt produced (even on failure, as original)
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution: primary + parent (secondary) file
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates (note: sorting discards DAS ordering, unlike filesFromList)
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
189 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key from `listOfKeys` found in `dict`.

    The matched key is removed from `dict` (so leftover keys can be detected by
    the caller).  If no key matches, `opt` is returned when given, otherwise an
    Exception is raised.  NOTE: the parameter name `dict` shadows the builtin
    but is kept for interface compatibility with existing callers.
    """
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    # identity comparison is the idiomatic None test (was `opt!=None`)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
200 
202  """The main building routines """
203 
def __init__(self, options, process = None, with_output = False, with_input = False ):
    """options taken from old cmsDriver and optparse """
    # Build the ConfigBuilder state from the parsed option object: validates
    # the option combination, parses the --step string into self.stepMap /
    # self.stepKeys, and creates the (empty) process skeleton.

    options.outfile_name = options.dirout+options.fileout

    self._options = options

    if self._options.isData and options.isMC:
        raise Exception("ERROR: You may specify only --data or --mc, not both")
    #if not self._options.conditions:
    #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

    # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
    if 'ENDJOB' in self._options.step:
        if  (hasattr(self._options,"outputDefinition") and \
            self._options.outputDefinition != '' and \
            any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
            (hasattr(self._options,"datatier") and \
            self._options.datatier and \
            'DQMIO' in self._options.datatier):
            print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
            self._options.step=self._options.step.replace(',ENDJOB','')



    # what steps are provided by this class?
    # every prepare_<STEP> method of the class defines an available step
    stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
    self.stepMap={}
    self.stepKeys=[]
    for step in self._options.step.split(","):
        if step=='': continue
        # a step spec is NAME[:SEQS[:SUFFIX]]; 're'-prefixed names (re-steps) are allowed
        stepParts = step.split(":")
        stepName = stepParts[0]
        if stepName not in stepList and not stepName.startswith('re'):
            raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
        if len(stepParts)==1:
            self.stepMap[stepName]=""
        elif len(stepParts)==2:
            self.stepMap[stepName]=stepParts[1].split('+')
        elif len(stepParts)==3:
            self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
        else:
            raise ValueError(f"Step definition {step} invalid")
        self.stepKeys.append(stepName)

    #print(f"map of steps is: {self.stepMap}")

    self.with_output = with_output
    self.process=process

    if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
        self.with_output = False
    self.with_input = with_input
    self.imports = []
    self.create_process()
    self.define_Configs()
    self.schedule = list()

    # we are doing three things here:
    # creating a process to catch errors
    # building the code to re-create the process

    # TODO: maybe a list of to be dumped objects would help as well
    self.blacklist_paths = []
    self.addedObjects = []
    # NOTE(review): this excerpt elides several lines of the original __init__
    # (e.g. further attribute initializations such as self.additionalCommands);
    # confirm against the full original file.
278 
def profileOptions(self):
    """
    addIgProfService
    Parse the --profile option so the igprof profile service can dump in the
    middle of the run.

    The option string is "type[:startEvent[:interval[:format]]]".  Returns the
    tuple (profilerStart, profilerInterval, profilerFormat, profilerJobFormat);
    raises Exception when startEvent/interval are not numeric.
    """
    profileOpts = self._options.profile.split(':')
    profilerStart = 1
    profilerInterval = 100
    profilerFormat = None
    profilerJobFormat = None

    # consume the colon-separated fields left to right (truthiness instead of
    # the original non-idiomatic `if len(...)`)
    if profileOpts:
        #type, given as first argument, is unused here
        profileOpts.pop(0)
    if profileOpts:
        startEvent = profileOpts.pop(0)
        if not startEvent.isdigit():
            raise Exception("%s is not a number" % startEvent)
        profilerStart = int(startEvent)
    if profileOpts:
        eventInterval = profileOpts.pop(0)
        if not eventInterval.isdigit():
            raise Exception("%s is not a number" % eventInterval)
        profilerInterval = int(eventInterval)
    if profileOpts:
        profilerFormat = profileOpts.pop(0)

    if not profilerFormat:
        # default file name: <event type>___<md5 of the job configuration>___%I.gz
        profilerFormat = "%s___%s___%%I.gz" % (
            self._options.evt_type.replace("_cfi", ""),
            hashlib.md5(
                (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
                 str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
            ).hexdigest()
        )
    if not profilerJobFormat and profilerFormat.endswith(".gz"):
        profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
    elif not profilerJobFormat:
        profilerJobFormat = profilerFormat + "_EndOfJob.gz"

    return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
322 
324  """
325  addJeProfService
326  Function to add the jemalloc heap profile service so that you can dump in the middle
327  of the run.
328  """
329  profileOpts = []
330  profilerStart = 1
331  profilerInterval = 100
332  profilerFormat = "jeprof_%s.heap"
333  profilerJobFormat = None
334 
335 
336  if not profilerJobFormat and profilerFormat.endswith(".heap"):
337  profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
338  elif not profilerJobFormat:
339  profilerJobFormat = profilerFormat + "_EndOfJob.heap"
340 
341  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
342 
def load(self,includeFile):
    """Load a config fragment into the process and return its python module.

    The '/'-separated fragment path is converted to a dotted module name
    before loading.
    """
    moduleName = includeFile.replace('/','.')
    self.process.load(moduleName)
    return sys.modules[moduleName]
347 
def loadAndRemember(self, includeFile):
    """Load a config fragment into the process, recording it for later dump.

    The dotted module name is appended to self.imports so the generated
    python configuration can reproduce the load statement; returns the
    loaded python module.
    """
    # we could make the imports a on-the-fly data method of the process instance itself
    # not sure if the latter is a good idea
    dotted = includeFile.replace('/', '.')
    self.imports.append(dotted)
    self.process.load(dotted)
    return sys.modules[dotted]
356 
def executeAndRemember(self, command):
    """helper routine to remember replace statements"""
    # record the command verbatim so it can be replayed in the dumped config
    self.additionalCommands.append(command)
    if not command.strip().startswith("#"):
        # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
        import re
        # rewrite every standalone 'process' token to 'self.process' and run the
        # command immediately against the live process object; pure-comment
        # commands are only recorded, never executed
        exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
        #exec(command.replace("process.","self.process."))
365 
def addCommon(self):
    """Apply step-independent process settings: the options block, optional
    lazy-download AdaptorConfig, and the IgProf / JeProf profiling services."""
    if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
        # harvesting jobs tolerate missing products and merge output fully
        self.process.options.Rethrow = ['ProductNotFound']
        self.process.options.fileMode = 'FULLMERGE'

    self.addedObjects.append(("","options"))

    if self._options.lazy_download:
        self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                 stats = cms.untracked.bool(True),
                                                 enable = cms.untracked.bool(True),
                                                 cacheHint = cms.untracked.string("lazy-download"),
                                                 readHint = cms.untracked.string("read-ahead-buffered")
                                                 )
        self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

    #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
    #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))

    if self._options.profile:
        # igprof dumps are piped through gzip to the formatted file names
        (start, interval, eventFormat, jobFormat)=self.profileOptions()
        self.process.IgProfService = cms.Service("IgProfService",
                                                 reportFirstEvent = cms.untracked.int32(start),
                                                 reportEventInterval = cms.untracked.int32(interval),
                                                 reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                 reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
        self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

    if self._options.heap_profile:
        # jemalloc heap dumps are written directly (no gzip pipe)
        (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
        self.process.JeProfService = cms.Service("JeProfService",
                                                 reportFirstEvent = cms.untracked.int32(start),
                                                 reportEventInterval = cms.untracked.int32(interval),
                                                 reportToFileAtPostEvent = cms.untracked.string("%s"%(eventFormat)),
                                                 reportToFileAtPostEndJob = cms.untracked.string("%s"%(jobFormat)))
        self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
402 
def addMaxEvents(self):
    """Configure how many events the process reads (and, optionally, writes)."""
    opts = self._options
    self.process.maxEvents.input = opts.number
    if opts.number_out:
        self.process.maxEvents.output = opts.number_out
    self.addedObjects.append(("","maxEvents"))
409 
def addSource(self):
    """Here the source is built. Priority: file, generator"""
    self.addedObjects.append(("Input source","source"))

    def filesFromOption(self):
        # fill process.source fileNames (and secondaryFileNames) from the
        # comma-separated --filein / --secondfilein values; each entry may be a
        # plain file, a "filelist:<file>" spec, or a "das:/dbs:<dataset>" query
        for entry in self._options.filein.split(','):
            print("entry",entry)
            if entry.startswith("filelist:"):
                filesFromList(entry[9:],self.process.source)
            elif entry.startswith("dbs:") or entry.startswith("das:"):
                filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
            else:
                self.process.source.fileNames.append(self._options.dirin+entry)
        if self._options.secondfilein:
            if not hasattr(self.process.source,"secondaryFileNames"):
                raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
            for entry in self._options.secondfilein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                else:
                    self.process.source.secondaryFileNames.append(self._options.dirin+entry)

    if self._options.filein or self._options.dasquery:
        # choose the source type from --filetype
        if self._options.filetype == "EDM":
            self.process.source=cms.Source("PoolSource",
                                           fileNames = cms.untracked.vstring(),
                                           secondaryFileNames= cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "DAT":
            self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            filesFromOption(self)
        elif self._options.filetype == "LHE":
            self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
            if self._options.filein.startswith("lhe:"):
                #list the article directory automatically
                args=self._options.filein.split(':')
                article=args[1]
                print('LHE input from article ',article)
                location='/store/lhe/'
                import os
                textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                for line in textOfFiles:
                    for fileName in [x for x in line.split() if '.lhe' in x]:
                        self.process.source.fileNames.append(location+article+'/'+fileName)
                #check first if list of LHE files is loaded (not empty)
                if len(line)<2:
                    print('Issue to load LHE files, please check and try again.')
                    sys.exit(-1)
                #Additional check to protect empty fileNames in process.source
                if len(self.process.source.fileNames)==0:
                    print('Issue with empty filename, but can pass line check')
                    sys.exit(-1)
                if len(args)>2:
                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            else:
                filesFromOption(self)

        elif self._options.filetype == "DQM":
            self.process.source=cms.Source("DQMRootSource",
                                           fileNames = cms.untracked.vstring())
            filesFromOption(self)

        elif self._options.filetype == "DQMDAQ":
            # FIXME: how to configure it if there are no input files specified?
            self.process.source=cms.Source("DQMStreamerReader")


        # harvesting steps need whole runs/lumis, not per-event processing
        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

    if self._options.dasquery!='':
        self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
        filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

        if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
            self.process.source.processingMode = cms.untracked.string("RunsAndLumis")


    # when generating, drop the heavy LHE XML blob from the input (unless the
    # job itself reads an LHE file)
    if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
        if self._options.inputCommands:
            self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
        else:
            self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

    if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
        if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
        for command in self._options.inputCommands.split(','):
            # remove whitespace around the keep/drop statements
            command = command.strip()
            if command=='': continue
            self.process.source.inputCommands.append(command)
        if not self._options.dropDescendant:
            self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

    if self._options.lumiToProcess:
        import FWCore.PythonUtilities.LumiList as LumiList
        self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

    if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
        # generator-only jobs run from an EmptySource
        if self.process.source is None:
            self.process.source=cms.Source("EmptySource")

    # modify source in case of run-dependent MC
    self.runsAndWeights=None
    if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
        if self._options.runsAndWeightsForMC:
            self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                # a string scenario names a module providing runProbabilityDistribution
                __import__(RunsAndWeights[self._options.runsScenarioForMC])
                self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
            else:
                self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

    if self.runsAndWeights:
        import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
        # NOTE(review): this excerpt only records the replay commands; the
        # direct ThrowAndSetRandomRun call appears to be elided here — confirm
        # against the full original file.
        self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
        self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

    # modify source in case of run-dependent MC (Run-3 method)
    # NOTE(review): self.runsAndWeightsInt is only assigned inside the branch
    # below in this excerpt; an initialization line may be elided — verify.
    if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
        if not self._options.isMC :
            raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
        if self._options.runsAndWeightsForMCIntegerWeights:
            self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
        else:
            from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
            if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
            else:
                self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

    if self.runsAndWeightsInt:
        if not self._options.relval:
            raise Exception("--relval option required when using --runsAndWeightsInt")
        if 'DATAMIX' in self._options.step:
            # map lumisections to runs so premixed events follow the weights
            from SimGeneral.Configuration.LumiToRun import lumi_to_run
            total_events, events_per_job = self._options.relval.split(',')
            lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
            self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

    return
561 
def addOutput(self):
    """ Add output module to the process """
    # Two configuration models are supported:
    #   1) --output: a python list of dicts, one per output module (handled in
    #      the first branch, which returns early), and
    #   2) the classic paired --eventcontent/--datatier comma-separated lists.
    # Returns the python source text of the created output modules.
    result=""
    if self._options.outputDefinition:
        if self._options.datatier:
            print("--datatier & --eventcontent options ignored")

        #new output convention with a list of dict
        outList = eval(self._options.outputDefinition)
        for (id,outDefDict) in enumerate(outList):
            outDefDictStr=outDefDict.__str__()
            if not isinstance(outDefDict,dict):
                raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
            #requires option: tier
            theTier=anyOf(['t','tier','dataTier'],outDefDict)
            #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename

            theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
            theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
            theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
            theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
            theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
            # module label has a particular role: pick the first candidate name
            # not already used by the process
            if not theModuleLabel:
                tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                          theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                          ]
                for name in tryNames:
                    if not hasattr(self.process,name):
                        theModuleLabel=name
                        break
            if not theModuleLabel:
                raise Exception("cannot find a module label for specification: "+outDefDictStr)
            if id==0:
                defaultFileName=self._options.outfile_name
            else:
                defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

            theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
            if not theFileName.endswith('.root'):
                theFileName+='.root'

            # anyOf pops recognized keys, so anything left over is a typo
            if len(outDefDict):
                raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
            if theStreamType=='DQMIO': theStreamType='DQM'
            if theStreamType=='ALL':
                theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
            else:
                theEventContent = getattr(self.process, theStreamType+"EventContent")


            addAlCaSelects=False
            if theStreamType=='ALCARECO' and not theFilterName:
                theFilterName='StreamALCACombined'
                addAlCaSelects=True

            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent.clone(),
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(
                                         dataTier = cms.untracked.string(theTier),
                                         filterName = cms.untracked.string(theFilterName))
                                      )
            # select generated / filtered events unless an explicit selection was given
            if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if not theSelectEvent and hasattr(self.process,'filtering_step'):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
            if theSelectEvent:
                output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

            if addAlCaSelects:
                # combined ALCARECO stream selects the union of all AlCa path selections
                if not hasattr(output,'SelectEvents'):
                    output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                for alca in self.AlCaPaths:
                    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


            if hasattr(self.process,theModuleLabel):
                raise Exception("the current process already has a module "+theModuleLabel+" defined")
            #print "creating output module ",theModuleLabel
            setattr(self.process,theModuleLabel,output)
            outputModule=getattr(self.process,theModuleLabel)
            setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,theModuleLabel+'_step')
            self.schedule.append(path)

            if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                # dump a reference to the event-content object instead of the full list
                def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
            if theExtraOutputCommands:
                if not isinstance(theExtraOutputCommands,list):
                    raise Exception("extra ouput command in --option must be a list of strings")
                if hasattr(self.process,theStreamType+"EventContent"):
                    self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                else:
                    outputModule.outputCommands.extend(theExtraOutputCommands)

            result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


        return result

    # classic model: paired --eventcontent / --datatier lists
    streamTypes=self._options.eventcontent.split(',')
    tiers=self._options.datatier.split(',')
    if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
        raise Exception("number of event content arguments does not match number of datatier arguments")

    # if the only step is alca we don't need to put in an output
    if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
        return "\n"

    for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
        if streamType=='': continue
        if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
        if streamType=='DQMIO': streamType='DQM'
        eventContent=streamType
        # NANOEDM streams reuse the corresponding NANOAOD event content
        if streamType == "NANOEDMAOD" :
            eventContent = "NANOAOD"
        elif streamType == "NANOEDMAODSIM" :
            eventContent = "NANOAODSIM"
        theEventContent = getattr(self.process, eventContent+"EventContent")
        if i==0:
            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
        else:
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername
        CppType='PoolOutputModule'
        if self._options.timeoutOutput:
            CppType='TimeoutPoolOutputModule'
        if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
        if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
        output = cms.OutputModule(CppType,
                                  theEventContent,
                                  fileName = cms.untracked.string(theFileName),
                                  dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                               filterName = cms.untracked.string(theFilterName)
                                                               )
                                  )
        if hasattr(self.process,"generation_step") and streamType!='LHE':
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
        if hasattr(self.process,"filtering_step"):
            output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

        if streamType=='ALCARECO':
            output.dataset.filterName = cms.untracked.string('StreamALCACombined')

        if "MINIAOD" in streamType:
            from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
            # NOTE(review): the imported miniAOD_customizeOutput is never called
            # in this excerpt (a line appears to be elided here) — confirm
            # against the full original file.

        outputModuleName=streamType+'output'
        setattr(self.process,outputModuleName,output)
        outputModule=getattr(self.process,outputModuleName)
        setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
        path=getattr(self.process,outputModuleName+'_step')
        self.schedule.append(path)

        if self._options.outputCommands and streamType!='DQM':
            for evct in self._options.outputCommands.split(','):
                if not evct: continue
                self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

        if not self._options.inlineEventContent:
            tmpstreamType=streamType
            if "NANOEDM" in tmpstreamType :
                tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
            # dump a reference to the event-content object instead of the full list
            def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                return label
            outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

        result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

    return result
742 
743  def addStandardSequences(self):
744  """
745  Add selected standard sequences to the process
746  """
747  # load the pile up file
748  if self._options.pileup:
749  pileupSpec=self._options.pileup.split(',')[0]
750 
751  #make sure there is a set of pileup files specified when needed
752  pileups_without_input=[defaultOptions.pileup,"Cosmics","default","HiMixNoPU",None]
753  if self._options.pileup not in pileups_without_input and self._options.pileup_input==None:
754  message = "Pileup scenerio requires input files. Please add an appropriate --pileup_input option"
755  raise Exception(message)
756 
757  # Does the requested pile-up scenario exist?
758  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
759  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
760  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
761  raise Exception(message)
762 
763  # Put mixing parameters in a dictionary
764  if '.' in pileupSpec:
765  mixingDict={'file':pileupSpec}
766  elif pileupSpec.startswith('file:'):
767  mixingDict={'file':pileupSpec[5:]}
768  else:
769  import copy
770  mixingDict=copy.copy(Mixing[pileupSpec])
771  if len(self._options.pileup.split(','))>1:
772  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
773 
774  # Load the pu cfg file corresponding to the requested pu scenario
775  if 'file:' in pileupSpec:
776  #the file is local
777  self.process.load(mixingDict['file'])
778  print("inlining mixing module configuration")
779  self._options.inlineObjects+=',mix'
780  else:
781  self.loadAndRemember(mixingDict['file'])
782 
783  mixingDict.pop('file')
784  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
785  if self._options.pileup_input:
786  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
787  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
788  elif self._options.pileup_input.startswith("filelist:"):
789  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
790  else:
791  mixingDict['F']=self._options.pileup_input.split(',')
792  specialization=defineMixing(mixingDict)
793  for command in specialization:
794  self.executeAndRemember(command)
795  if len(mixingDict)!=0:
796  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
797 
798 
799  # load the geometry file
800  try:
801  if len(self.stepMap):
802  self.loadAndRemember(self.GeometryCFF)
803  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
805  if self.geometryDBLabel:
806  self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
807  self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))
808 
809  except ImportError:
810  print("Geometry option",self._options.geometry,"unknown.")
811  raise
812 
813  if len(self.stepMap):
814  self.loadAndRemember(self.magFieldCFF)
815 
816  for stepName in self.stepKeys:
817  stepSpec = self.stepMap[stepName]
818  print("Step:", stepName,"Spec:",stepSpec)
819  if stepName.startswith('re'):
820 
821  if stepName[2:] not in self._options.donotDropOnInput:
822  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
823  stepName=stepName[2:]
824  if stepSpec=="":
825  getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
826  elif isinstance(stepSpec, list):
827  getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
828  elif isinstance(stepSpec, tuple):
829  getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
830  else:
831  raise ValueError("Invalid step definition")
832 
833  if self._options.restoreRNDSeeds!=False:
834  #it is either True, or a process name
835  if self._options.restoreRNDSeeds==True:
836  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
837  else:
838  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
839  if self._options.inputEventContent or self._options.inputCommands:
840  if self._options.inputCommands:
841  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
842  else:
843  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
844 
845 
846  def completeInputCommand(self):
847  if self._options.inputEventContent:
848  import copy
849  def dropSecondDropStar(iec):
850  #drop occurence of 'drop *' in the list
851  count=0
852  for item in iec:
853  if item=='drop *':
854  if count!=0:
855  iec.remove(item)
856  count+=1
857 
858 
859  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
860  for evct in self._options.inputEventContent.split(','):
861  if evct=='': continue
862  theEventContent = getattr(self.process, evct+"EventContent")
863  if hasattr(theEventContent,'outputCommands'):
864  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
865  if hasattr(theEventContent,'inputCommands'):
866  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
867 
868  dropSecondDropStar(self.process.source.inputCommands)
869 
870  if not self._options.dropDescendant:
871  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
872 
873 
874  return
875 
    def addConditions(self):
        """Add conditions to the process"""
        # nothing to do when no conditions (GlobalTag) were requested
        if not self._options.conditions: return

        # legacy syntax: strip the obsolete "FrontierConditions_GlobalTag," prefix
        if 'FrontierConditions_GlobalTag' in self._options.conditions:
            print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
            self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')

        # apply the GlobalTag to the process, and record the equivalent
        # statements so they appear in the dumped configuration as well
        from Configuration.AlCa.GlobalTag import GlobalTag
        self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
        self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
        self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
889 
890 
891  def addCustomise(self,unsch=0):
892  """Include the customise code """
893 
894  custOpt=[]
895  if unsch==0:
896  for c in self._options.customisation_file:
897  custOpt.extend(c.split(","))
898  else:
899  for c in self._options.customisation_file_unsch:
900  custOpt.extend(c.split(","))
901 
902  custMap=DictTypes.SortedKeysDict()
903  for opt in custOpt:
904  if opt=='': continue
905  if opt.count('.')>1:
906  raise Exception("more than . in the specification:"+opt)
907  fileName=opt.split('.')[0]
908  if opt.count('.')==0: rest='customise'
909  else:
910  rest=opt.split('.')[1]
911  if rest=='py': rest='customise' #catch the case of --customise file.py
912 
913  if fileName in custMap:
914  custMap[fileName].extend(rest.split('+'))
915  else:
916  custMap[fileName]=rest.split('+')
917 
918  if len(custMap)==0:
919  final_snippet='\n'
920  else:
921  final_snippet='\n# customisation of the process.\n'
922 
923  allFcn=[]
924  for opt in custMap:
925  allFcn.extend(custMap[opt])
926  for fcn in allFcn:
927  if allFcn.count(fcn)!=1:
928  raise Exception("cannot specify twice "+fcn+" as a customisation method")
929 
930  for f in custMap:
931  # let python search for that package and do syntax checking at the same time
932  packageName = f.replace(".py","").replace("/",".")
933  __import__(packageName)
934  package = sys.modules[packageName]
935 
936  # now ask the package for its definition and pick .py instead of .pyc
937  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
938 
939  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
940  if self._options.inline_custom:
941  for line in file(customiseFile,'r'):
942  if "import FWCore.ParameterSet.Config" in line:
943  continue
944  final_snippet += line
945  else:
946  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
947  for fcn in custMap[f]:
948  print("customising the process with",fcn,"from",f)
949  if not hasattr(package,fcn):
950  #bound to fail at run time
951  raise Exception("config "+f+" has no function "+fcn)
952  #execute the command
953  self.process=getattr(package,fcn)(self.process)
954  #and print it in the configuration
955  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
956  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
957 
958  if len(custMap)!=0:
959  final_snippet += '\n# End of customisation functions\n'
960 
961 
962  return final_snippet
963 
964  def addCustomiseCmdLine(self):
965  final_snippet='\n# Customisation from command line\n'
966  if self._options.customise_commands:
967  import string
968  for com in self._options.customise_commands.split('\\n'):
969  com=com.lstrip()
970  self.executeAndRemember(com)
971  final_snippet +='\n'+com
972 
973  return final_snippet
974 
975  #----------------------------------------------------------------------------
976  # here the methods to define the python includes for each step or
977  # conditions
978  #----------------------------------------------------------------------------
    def define_Configs(self):
        """Set the default cff file and default sequence name for every
        workflow step, then specialise them for the scenario (pp / cosmics /
        nocoll / HeavyIons), data vs MC, fastsim, and the chosen geometry.
        Only assigns attributes; the cffs are loaded later by the prepare_*
        methods.
        """
        # base services and the particle-data table
        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print('Invalid particle table provided. Options are:')
            print(defaultOptions.particleTable)
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')

        # default cff file for each step
        self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
        self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
        self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
        self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
        self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
        self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
        self.L1P2GTDefaultCFF = 'Configuration/StandardSequences/SimPhase2L1GlobalTriggerEmulator_cff'
        self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
        self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
        self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
        if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
        self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
        self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
        self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
        self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
        self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
        self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
        # DATAMIX replaces several defaults with the data-mixing variants
        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
            self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        # default sequence name for each step
        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        # full 'reconstruction' only makes sense when RAW2DIGI runs in the same job
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.RECOSIMDefaultSeq='recosim'
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'
        self.PATGENDefaultSeq='miniGEN'
        #TODO: check based on file input
        self.NANODefaultSeq='nanoSequence'
        self.NANODefaultCustom='nanoAOD_customizeCommon'
        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
        # NOTE(review): VtxSmearedDefaultKey / VtxSmearedHIDefaultKey come from
        # Configuration.StandardSequences.VtxSmeared — the import is not visible
        # in this excerpt; confirm against the full file
        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # if its MC then change the raw2digi
        if self._options.isMC==True:
            self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq='nanoSequenceMC'
        else:
            self._options.beamspot = None

        #patch for gen, due to backward incompatibility
        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        # cosmics scenario overrides
        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        # heavy-ion scenario overrides
        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.RECODefaultSeq='reconstruction'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"


        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None
        # the magnetic field
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        # NOTE(review): self.geometryDBLabel is read later (addStandardSequences);
        # its initialisation is not visible in this excerpt — confirm upstream
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        simGeometry=''
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                # resolve a geometry shorthand via GeometryConf, else pass through
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                #may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
                print("with DB:")
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # synchronize the geometry configuration and the FullSimulation sequence to be used
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cff files and sequences
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.NANODefaultSeq = 'nanoSequenceFS'
            self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"

        # Mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey


        #not driven by a default cff anymore
        if self._options.isData:
            self._options.pileup=None
1189 
1190 
1193  # for alca, skims, etc
1194  def addExtraStream(self, name, stream, workflow='full'):
1195  # define output module and go from there
1196  output = cms.OutputModule("PoolOutputModule")
1197  if stream.selectEvents.parameters_().__len__()!=0:
1198  output.SelectEvents = stream.selectEvents
1199  else:
1200  output.SelectEvents = cms.untracked.PSet()
1201  output.SelectEvents.SelectEvents=cms.vstring()
1202  if isinstance(stream.paths,tuple):
1203  for path in stream.paths:
1204  output.SelectEvents.SelectEvents.append(path.label())
1205  else:
1206  output.SelectEvents.SelectEvents.append(stream.paths.label())
1207 
1208 
1209 
1210  if isinstance(stream.content,str):
1211  evtPset=getattr(self.process,stream.content)
1212  for p in evtPset.parameters_():
1213  setattr(output,p,getattr(evtPset,p))
1214  if not self._options.inlineEventContent:
1215  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1216  return label
1217  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1218  else:
1219  output.outputCommands = stream.content
1220 
1221 
1222  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1223 
1224  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1225  filterName = cms.untracked.string(stream.name))
1226 
1227  if self._options.filtername:
1228  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1229 
1230  #add an automatic flushing to limit memory consumption
1231  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1232 
1233  if workflow in ("producers,full"):
1234  if isinstance(stream.paths,tuple):
1235  for path in stream.paths:
1236  self.schedule.append(path)
1237  else:
1238  self.schedule.append(stream.paths)
1239 
1240 
1241  # in case of relvals we don't want to have additional outputs
1242  if (not self._options.relval) and workflow in ("full","output"):
1243  self.additionalOutputs[name] = output
1244  setattr(self.process,name,output)
1245 
1246  if workflow == 'output':
1247  # adjust the select events to the proper trigger results from previous process
1248  filterList = output.SelectEvents.SelectEvents
1249  for i, filter in enumerate(filterList):
1250  filterList[i] = filter+":"+self._options.triggerResultsProcess
1251 
1252  return output
1253 
1254  #----------------------------------------------------------------------------
1255  # here the methods to create the steps. Of course we are doing magic here ;)
1256  # prepare_STEPNAME modifies self.process and what else's needed.
1257  #----------------------------------------------------------------------------
1258 
1259  def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
1260  _dotsplit = stepSpec.split('.')
1261  if ( len(_dotsplit)==1 ):
1262  if '/' in _dotsplit[0]:
1263  _sequence = defaultSEQ if defaultSEQ else stepSpec
1264  _cff = _dotsplit[0]
1265  else:
1266  _sequence = stepSpec
1267  _cff = defaultCFF
1268  elif ( len(_dotsplit)==2 ):
1269  _cff,_sequence = _dotsplit
1270  else:
1271  print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1272  print(stepSpec,"not recognized")
1273  raise
1274  l=self.loadAndRemember(_cff)
1275  return l,_sequence,_cff
1276 
1277  def scheduleSequence(self,seq,prefix,what='Path'):
1278  if '*' in seq:
1279  #create only one path with all sequences in it
1280  for i,s in enumerate(seq.split('*')):
1281  if i==0:
1282  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1283  else:
1284  p=getattr(self.process,prefix)
1285  tmp = getattr(self.process, s)
1286  if isinstance(tmp, cms.Task):
1287  p.associate(tmp)
1288  else:
1289  p+=tmp
1290  self.schedule.append(getattr(self.process,prefix))
1291  return
1292  else:
1293  #create as many path as many sequences
1294  if not '+' in seq:
1295  if self.nextScheduleIsConditional:
1296  self.conditionalPaths.append(prefix)
1297  setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1298  self.schedule.append(getattr(self.process,prefix))
1299  else:
1300  for i,s in enumerate(seq.split('+')):
1301  sn=prefix+'%d'%(i)
1302  setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1303  self.schedule.append(getattr(self.process,sn))
1304  return
1305 
1306  def scheduleSequenceAtEnd(self,seq,prefix):
1307  self.scheduleSequence(seq,prefix,what='EndPath')
1308  return
1309 
1310  def prepare_ALCAPRODUCER(self, stepSpec = None):
1311  self.prepare_ALCA(stepSpec, workflow = "producers")
1312 
1313  def prepare_ALCAOUTPUT(self, stepSpec = None):
1314  self.prepare_ALCA(stepSpec, workflow = "output")
1315 
    def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
        """ Enrich the process with alca streams """
        alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

        MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        for alca in alcaList:
            if (len(alca)>MAXLEN):
                raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

        # NOTE(review): maxLevel appears unused in this method
        maxLevel=0
        from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        # walk every object the alca cff defines and pick the requested streams
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                # some AlCa sequences cannot run with concurrent lumi blocks
                if shortName in AlCaNoConcurrentLumis:
                    print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                    self._options.nConcurrentLumis = 1
                    self._options.nConcurrentIOVs = 1
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # mark this alca as handled (it may appear multiple times)
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)


        # anything left in alcaList was not found in the cff -> report and fail
        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print("The following alcas could not be found "+str(alcaList))
            print("available ",available)
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1381 
1382  def prepare_LHE(self, stepSpec = None):
1383  #load the fragment
1384 
1385  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1386  print("Loading lhe fragment from",loadFragment)
1387  __import__(loadFragment)
1388  self.process.load(loadFragment)
1389 
1390  self._options.inlineObjects+=','+stepSpec
1391 
1392  getattr(self.process,stepSpec).nEvents = self._options.number
1393 
1394  #schedule it
1395  self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1396  self.excludedPaths.append("lhe_step")
1397  self.schedule.append( self.process.lhe_step )
1398 
1399  def prepare_GEN(self, stepSpec = None):
1400  """ load the fragment of generator configuration """
1401  loadFailure=False
1402  #remove trailing .py
1403  #support old style .cfi by changing into something.cfi into something_cfi
1404  #remove python/ from the name
1405  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1406  #standard location of fragments
1407  if not '/' in loadFragment:
1408  loadFragment='Configuration.Generator.'+loadFragment
1409  else:
1410  loadFragment=loadFragment.replace('/','.')
1411  try:
1412  print("Loading generator fragment from",loadFragment)
1413  __import__(loadFragment)
1414  except:
1415  loadFailure=True
1416  #if self.process.source and self.process.source.type_()=='EmptySource':
1417  if not (self._options.filein or self._options.dasquery):
1418  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1419 
1420  if not loadFailure:
1421  from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators
1422 
1423  generatorModule=sys.modules[loadFragment]
1424  genModules=generatorModule.__dict__
1425  #remove lhe producer module since this should have been
1426  #imported instead in the LHE step
1427  if self.LHEDefaultSeq in genModules:
1428  del genModules[self.LHEDefaultSeq]
1429 
1430  if self._options.hideGen:
1431  self.loadAndRemember(loadFragment)
1432  else:
1433  self.process.load(loadFragment)
1434  # expose the objects from that fragment to the configuration
1435  import FWCore.ParameterSet.Modules as cmstypes
1436  for name in genModules:
1437  theObject = getattr(generatorModule,name)
1438  if isinstance(theObject, cmstypes._Module):
1439  self._options.inlineObjects=name+','+self._options.inlineObjects
1440  if theObject.type_() in noConcurrentLumiGenerators:
1441  print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
1442  self._options.nConcurrentLumis = 1
1443  self._options.nConcurrentIOVs = 1
1444  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1445  self._options.inlineObjects+=','+name
1446 
1447  if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly':
1448  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1449  self.productionFilterSequence = 'ProductionFilterSequence'
1450  elif 'generator' in genModules:
1451  self.productionFilterSequence = 'generator'
1452 
1453  """ Enrich the schedule with the rest of the generation step """
1454  _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)
1455 
1456  if True:
1457  try:
1458  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1459  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1460  self.loadAndRemember(cffToBeLoaded)
1461  except ImportError:
1462  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1463 
1464  if self._options.scenario == 'HeavyIons':
1465  if self._options.pileup=='HiMixGEN':
1466  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1467  elif self._options.pileup=='HiMixEmbGEN':
1468  self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
1469  else:
1470  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1471 
1472  self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
1473  self.schedule.append(self.process.generation_step)
1474 
1475  #register to the genstepfilter the name of the path (static right now, but might evolve)
1476  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1477 
1478  if 'reGEN' in self.stepMap or stepSpec == 'pgen_smear':
1479  #stop here
1480  return
1481 
1482  """ Enrich the schedule with the summary of the filter step """
1483  #the gen filter in the endpath
1484  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1485  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1486  return
1487 
def prepare_SIM(self, stepSpec = None):
    """Enrich the schedule with the simulation step."""
    opts = self._options
    _, simSequence, _ = self.loadDefaultOrSpecifiedCFF(stepSpec, self.SIMDefaultCFF)
    if opts.fast:
        # FastSim: a zero magnetic field is configured on the fastSimProducer
        if opts.magField == '0T':
            self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
    else:
        # full simulation: optionally enable the GFlash parametrisation
        if opts.gflash == True:
            self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
        # zero magnetic field is configured on the Geant4 step
        if opts.magField == '0T':
            self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
    self.scheduleSequence(simSequence, 'simulation_step')
    return
1503 
def prepare_DIGI(self, stepSpec = None):
    """Enrich the schedule with the digitisation step."""
    _, digiSequence, _ = self.loadDefaultOrSpecifiedCFF(stepSpec, self.DIGIDefaultCFF)

    if self._options.gflash == True:
        self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")

    # validation-style digi sequences need the "valid" digitizer collections
    if digiSequence in ('pdigi_valid', 'pdigi_hi'):
        self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")

    # when (re)digitising from an existing input file (not generating in this job
    # and not reading LHE) the generator products must be kept: add REGEN
    usesGenFromInput = digiSequence not in ('pdigi_nogen', 'pdigi_valid_nogen', 'pdigi_hi_nogen')
    if usesGenFromInput and not self.process.source.type_() == 'EmptySource' and not self._options.filetype == "LHE":
        if self._options.inputEventContent == '':
            self._options.inputEventContent = 'REGEN'
        else:
            self._options.inputEventContent = self._options.inputEventContent + ',REGEN'

    self.scheduleSequence(digiSequence, 'digitisation_step')
    return
1523 
def prepare_CFWRITER(self, stepSpec = None):
    """ Enrich the schedule with the crossing frame writer step"""
    # NOTE(review): stepSpec is accepted for uniformity with the other prepare_*
    # steps but is unused here; this rendering also shows no loadAndRemember of a
    # CFWRITER cff before scheduling - confirm against upstream.
    self.scheduleSequence('pcfw','cfwriter_step')
    return
1529 
def prepare_DATAMIX(self, stepSpec = None):
    """ Enrich the schedule with the data mixing step"""
    self.scheduleSequence('pdatamix','datamixing_step')

    # resolve the pileup input specification into a concrete list of files
    # and wire it into the data-mixing module of the dumped configuration
    if self._options.pileup_input:
        theFiles=''
        if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
            # 'dbs:<dataset>' / 'das:<dataset>': query DAS for the file list
            theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
        elif self._options.pileup_input.startswith("filelist:"):
            # 'filelist:<path>': read file names from a local list file
            theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
        else:
            # otherwise treat the option as a comma-separated list of file names
            theFiles=self._options.pileup_input.split(',')
        #print theFiles
        self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )

    return
1547 
def prepare_DIGI2RAW(self, stepSpec = None):
    """Enrich the schedule with the DIGI-to-RAW conversion step."""
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.DIGI2RAWDefaultCFF)
    self.scheduleSequence(loaded[1], 'digi2raw_step')
    return
1552 
def prepare_REPACK(self, stepSpec = None):
    """Enrich the schedule with the RAW repacking step."""
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.REPACKDefaultCFF)
    self.scheduleSequence(loaded[1], 'digi2repack_step')
    return
1557 
def loadPhase2GTMenu(self, menuFile: str):
    """Load a Phase-2 Global Trigger menu by name and schedule every cms.Path
    defined by the trigger-path files the menu lists."""
    import importlib
    menuModule = importlib.import_module(f'L1Trigger.Configuration.Phase2GTMenus.{menuFile}')

    pathNames = []  # labels of the cms.Path objects found in the menu's files
    for pathFile in menuModule.menu:
        # this load-and-remember will set the algo variable of the algo block later
        self.loadAndRemember(pathFile)
        pathModule = importlib.import_module(pathFile)
        for attrName in dir(pathModule):
            if type(getattr(pathModule, attrName)) == cms.Path:
                pathNames.append(attrName)

    # resolve the collected labels on the process and schedule the paths
    self.schedule.extend(getattr(self.process, name) for name in pathNames)
1578 
1579  # create the L1 GT step
1580  # We abuse the stepSpec a bit as a way to specify a menu
def prepare_L1P2GT(self, stepSpec=None):
    """ Run the GT emulation sequence on top of the L1 emulation step """
    # NOTE(review): this rendering shows no loadAndRemember of a default P2GT cff
    # before the sequences are scheduled - confirm against upstream.
    # schedule the Phase-2 GT producer and algo-block producer sequences
    self.scheduleSequence('l1tGTProducerSequence', 'Phase2L1GTProducer')
    self.scheduleSequence('l1tGTAlgoBlockProducerSequence', 'Phase2L1GTAlgoBlockProducer')
    # stepSpec doubles as the menu name; fall back to the default menu when absent
    if stepSpec == None:
        defaultMenuFile = "prototype_2023_v1_0_0"
        self.loadPhase2GTMenu(menuFile = defaultMenuFile)
    else:
        self.loadPhase2GTMenu(menuFile = stepSpec)
1591 
def prepare_L1(self, stepSpec = None):
    """Enrich the schedule with the L1 simulation step.

    No custom specification is supported: stepSpec must be None.
    """
    assert stepSpec == None
    self.loadAndRemember(self.L1EMDefaultCFF)
    self.scheduleSequence('SimL1Emulator', 'L1simulation_step')
    return
1598 
def prepare_L1REPACK(self, stepSpec = None):
    """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
    supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
    # guard clause: reject unknown repack flavours up front
    if stepSpec not in supported:
        print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
        raise Exception('unsupported feature')

    self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
    # heavy-ion data uses the repacked raw collection as input
    if self._options.scenario == 'HeavyIons':
        self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
    self.scheduleSequence('SimL1Emulator','L1RePack_step')
1610 
def prepare_HLT(self, stepSpec = None):
    """ Enrich the schedule with the HLT simulation step"""
    # an explicit menu specification is mandatory for the HLT step
    if not stepSpec:
        print("no specification of the hlt menu has been given, should never happen")
        raise  Exception('no HLT specifications provided')

    if '@' in stepSpec:
        # case where HLT:@something was provided: resolve the shortcut via autoHLT
        from Configuration.HLT.autoHLT import autoHLT
        key = stepSpec[1:]
        if key in autoHLT:
            stepSpec = autoHLT[key]
        else:
            raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

    if ',' in stepSpec:
        #case where HLT:something:something was provided
        self.executeAndRemember('import HLTrigger.Configuration.Utilities')
        optionsForHLT = {}
        # the menu type depends on the scenario (HIon for heavy ions, GRun otherwise)
        if self._options.scenario == 'HeavyIons':
            optionsForHLT['type'] = 'HIon'
        else:
            optionsForHLT['type'] = 'GRun'
        optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
        if stepSpec == 'run,fromSource':
            # load the menu of the run taken from the input source, whichever
            # of the two run-number attributes the source provides
            if hasattr(self.process.source,'firstRun'):
                self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
            elif hasattr(self.process.source,'setRunNumber'):
                self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
            else:
                raise Exception(f'Cannot replace menu to load {stepSpec}')
        else:
            # explicit '<db>,<menu>' specification
            self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
    else:
        # plain menu name: load the corresponding frozen HLT configuration
        self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)

    if self._options.isMC:
        self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

    # rename the process if it is not called HLT, both in the live process
    # and in the dumped configuration
    if self._options.name != 'HLT':
        self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
        self.additionalCommands.append('process = ProcessName(process)')
        self.additionalCommands.append('')
        from HLTrigger.Configuration.CustomConfigs import ProcessName
        self.process = ProcessName(self.process)

    if self.process.schedule == None:
        raise Exception('the HLT step did not attach a valid schedule to the process')

    # remember where the HLT paths start and keep them out of the dumped cfg
    self.scheduleIndexOfFirstHLTPath = len(self.schedule)
    [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

    # this is a fake, to be removed with fastim migration and HLT menu dump
    if self._options.fast:
        if not hasattr(self.process,'HLTEndSequence'):
            self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1667 
1668 
def prepare_RAW2RECO(self, stepSpec = None):
    """Enrich the schedule with RAW2DIGI followed by RECO.

    stepSpec must be of the form '<recoSpec>,<digiSpec>' (RECO part first);
    the two parts are dispatched to prepare_RAW2DIGI and prepare_RECO.

    Raises:
        Exception: if stepSpec does not carry both comma-separated parts.
    """
    if ',' in stepSpec:
        # bug fix: was stepSpec.spli(','), which raised AttributeError
        seqReco, seqDigi = stepSpec.split(',')
    else:
        # previously this only printed and then crashed with a NameError on
        # the undefined sequence names; fail explicitly instead
        raise Exception(f"RAW2RECO requires two specifications {stepSpec} insufficient")

    self.prepare_RAW2DIGI(seqDigi)
    self.prepare_RECO(seqReco)
    return
1678 
def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
    """Enrich the schedule with the RAW-to-DIGI unpacking step."""
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.RAW2DIGIDefaultCFF)
    self.scheduleSequence(loaded[1], 'raw2digi_step')
    return
1683 
def prepare_PATFILTER(self, stepSpec = None):
    """Schedule the MET-filter flag paths used by PAT/miniAOD."""
    self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
    from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
    # schedule one 'Flag_<filter>' path per known MET filter
    for filt in allMetFilterPaths:
        self.schedule.append(getattr(self.process,'Flag_'+filt))
1689 
def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
    '''Enrich the schedule with L1 HW validation (deprecated: loads the cff but schedules nothing).'''
    self.loadDefaultOrSpecifiedCFF(stepSpec, self.L1HwValDefaultCFF)
    print('\n\n\n DEPRECATED this has no action \n\n\n')
    return
1695 
def prepare_L1Reco(self, stepSpec = "L1Reco"):
    '''Enrich the schedule with L1 reconstruction.'''
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.L1RecoDefaultCFF)
    self.scheduleSequence(loaded[1], 'L1Reco_step')
    return
1701 
def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
    '''Enrich the schedule with the L1 track trigger reconstruction.'''
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.L1TrackTriggerDefaultCFF)
    self.scheduleSequence(loaded[1], 'L1TrackTrigger_step')
    return
1707 
def prepare_FILTER(self, stepSpec = None):
    ''' Enrich the schedule with a user defined filter sequence '''
    # stepSpec has the form '<configFragment>.<sequenceName>'
    filterConfig,filterSeq = stepSpec.split('.')
    filterConfig=self.load(filterConfig)

    class PrintAllModules(object):
        """Visitor that accumulates the labels of all modules in a sequence."""
        def __init__(self):
            # comma-separated list of module labels, filled during the visit
            self.inliner=''
            pass
        def enter(self,visitee):
            try:
                label=visitee.label()

                self.inliner=label+','+self.inliner
            except:
                # objects without a label are simply skipped (best-effort)
                pass
        def leave(self,v): pass

    # collect all module labels of the filter sequence so that they are
    # inlined (dumped in full) in the generated configuration
    expander=PrintAllModules()
    getattr(self.process,filterSeq).visit( expander )
    self._options.inlineObjects+=','+expander.inliner
    self._options.inlineObjects+=','+filterSeq


    self.scheduleSequence(filterSeq,'filtering_step')
    # the subsequently scheduled path(s) become conditional on this filter
    self.nextScheduleIsConditional=True
    # record the filter sequence for later use by the builder
    self.productionFilterSequence = filterSeq

    return
1739 
def prepare_RECO(self, stepSpec = "reconstruction"):
    '''Enrich the schedule with reconstruction.'''
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.RECODefaultCFF)
    self.scheduleSequence(loaded[1], 'reconstruction_step')
    return
1745 
def prepare_RECOSIM(self, stepSpec = "recosim"):
    '''Enrich the schedule with the sim-aware reconstruction step.'''
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.RECOSIMDefaultCFF)
    self.scheduleSequence(loaded[1], 'recosim_step')
    return
1751 
def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
    '''Enrich the schedule with the part of reconstruction that is done before mixing in FastSim.'''
    if not self._options.fast:
        print("ERROR: this step is only implemented for FastSim")
        sys.exit()
    # note: the default sequence is always used here, independent of stepSpec
    loaded = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq, self.RECOBEFMIXDefaultCFF)
    self.scheduleSequence(loaded[1], 'reconstruction_befmix_step')
    return
1760 
def prepare_PAT(self, stepSpec = "miniAOD"):
    ''' Enrich the schedule with PAT '''
    self.prepare_PATFILTER(self)
    self.loadDefaultOrSpecifiedCFF(stepSpec, self.PATDefaultCFF)
    self.labelsToAssociate.append('patTask')
    # pick the miniAOD customisation matching data / fastsim MC / fullsim MC
    if self._options.isData:
        customizer = "PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData"
    elif self._options.fast:
        customizer = "PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim"
    else:
        customizer = "PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC"
    self._options.customisation_file_unsch.insert(0, customizer)

    # if the HLT process was renamed, point the PAT trigger tools at it
    if self._options.hltProcess:
        if len(self._options.customise_commands) > 1:
            self._options.customise_commands = self._options.customise_commands + " \n"
        self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
        self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
        self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

#       self.renameHLTprocessInSequence(sequence)

    return
1784 
def prepare_PATGEN(self, stepSpec = "miniGEN"):
    '''Enrich the schedule with PATGEN (unscheduled; MC only).'''
    self.loadDefaultOrSpecifiedCFF(stepSpec, self.PATGENDefaultCFF) #this is unscheduled
    self.labelsToAssociate.append('patGENTask')
    # PATGEN only makes sense on simulated events
    if self._options.isData:
        raise Exception("PATGEN step can only run on MC")
    return
1792 
def prepare_NANO(self, stepSpec = '' ):
    print(f"in prepare_nano {stepSpec}")
    ''' Enrich the schedule with NANO '''
    # '@' marks an autoNANO mapping; otherwise resolve a cff/sequence directly
    if not '@' in stepSpec:
        _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
    else:
        _nanoSeq = stepSpec
        _nanoCff = self.NANODefaultCFF

    print(_nanoSeq)
    # create full specified sequence using autoNANO
    from PhysicsTools.NanoAOD.autoNANO import autoNANO, expandNanoMapping
    # if not a autoNANO mapping, load an empty customization, which later will be converted into the default.
    _nanoCustoms = _nanoSeq.split('+') if '@' in stepSpec else ['']
    _nanoSeq = _nanoSeq.split('+')
    expandNanoMapping(_nanoSeq, autoNANO, 'sequence')
    expandNanoMapping(_nanoCustoms, autoNANO, 'customize')
    # make sure there are no duplicates while preserving the ordering
    _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
    _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
    # replace empty sequence with default
    _nanoSeq = [seq if seq!='' else f"{self.NANODefaultCFF}.{self.NANODefaultSeq}" for seq in _nanoSeq]
    _nanoCustoms = [cust if cust!='' else self.NANODefaultCustom for cust in _nanoCustoms]
    # build and inject the sequence
    if len(_nanoSeq) < 1 and '@' in stepSpec:
        raise Exception(f'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mapping')
    _seqToSchedule = []
    for _subSeq in _nanoSeq:
        if '.' in _subSeq:
            # '<cff>.<sequence>': load the cff and schedule the named sequence
            _cff,_seq = _subSeq.split('.')
            print("NANO: scheduling:",_seq,"from",_cff)
            self.loadAndRemember(_cff)
            _seqToSchedule.append(_seq)
        elif '/' in _subSeq:
            # a cff path: load it and schedule the default NANO sequence
            self.loadAndRemember(_subSeq)
            _seqToSchedule.append(self.NANODefaultSeq)
        else:
            # a bare sequence name, assumed to be already available
            print("NANO: scheduling:",_subSeq)
            _seqToSchedule.append(_subSeq)
    self.scheduleSequence('+'.join(_seqToSchedule), 'nanoAOD_step')

    # add the customisations
    for custom in _nanoCustoms:
        custom_path = custom if '.' in custom else '.'.join([_nanoCff,custom])
        # customization order can be important for NANO, here later specified customise take precedence
        self._options.customisation_file.append(custom_path)
    # if the HLT process was renamed, point the trigger unpacker at it
    if self._options.hltProcess:
        if len(self._options.customise_commands) > 1:
            self._options.customise_commands = self._options.customise_commands + " \n"
        self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1843 
def prepare_SKIM(self, stepSpec = "all"):
    ''' Enrich the schedule with skimming fragments'''
    skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)

    # decide whether HLT results must be re-addressed under a new process name
    stdHLTProcName = 'HLT'
    newHLTProcName = self._options.hltProcess
    customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
    if customiseForReHLT:
        print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))


    # expand '@'-aliases of the requested skim list
    from Configuration.Skimming.autoSkim import autoSkim
    skimlist = sequence.split('+')
    self.expandMapping(skimlist,autoSkim)

    #print("dictionary for skims:", skimConfig.__dict__)
    for skim in skimConfig.__dict__:
        skimstream = getattr(skimConfig, skim)

        # blacklist AlCa paths so that they do not appear in the cfg
        if isinstance(skimstream, cms.Path):
            self.blacklist_paths.append(skimstream)
        # if enabled, apply "hltProcess" renaming to Sequences
        elif isinstance(skimstream, cms.Sequence):
            if customiseForReHLT:
                self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)

        # only FilteredStream objects define actual skim output streams
        if not isinstance(skimstream, cms.FilteredStream):
            continue

        shortname = skim.replace('SKIMStream','')
        if (sequence=="all"):
            self.addExtraStream(skim,skimstream)
        elif (shortname in skimlist):
            self.addExtraStream(skim,skimstream)
            #add a DQM eventcontent for this guy
            if self._options.datatier=='DQM':
                self.process.load(self.EVTCONTDefaultCFF)
                skimstreamDQM = cms.FilteredStream(
                    responsible = skimstream.responsible,
                    name = skimstream.name+'DQM',
                    paths = skimstream.paths,
                    selectEvents = skimstream.selectEvents,
                    content = self._options.datatier+'EventContent',
                    dataTier = cms.untracked.string(self._options.datatier)
                )
                self.addExtraStream(skim+'DQM',skimstreamDQM)
            # consume every occurrence of this skim from the request list
            for i in range(skimlist.count(shortname)):
                skimlist.remove(shortname)

    # anything left in the list was requested but never matched a stream
    if (skimlist.__len__()!=0 and sequence!="all"):
        print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
        raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1897 
1898 
def prepare_USER(self, stepSpec = None):
    '''Enrich the schedule with a user defined sequence.'''
    loaded = self.loadDefaultOrSpecifiedCFF(stepSpec, self.USERDefaultCFF)
    self.scheduleSequence(loaded[1], 'user_step')
    return
1904 
def prepare_POSTRECO(self, stepSpec = None):
    """ Enrich the schedule with the postreco step """
    # NOTE(review): this rendering shows no loadAndRemember of a POSTRECO cff
    # before scheduling - confirm against upstream.
    self.scheduleSequence('postreco_generator','postreco_step')
    return
1910 
1911 
def prepare_VALIDATION(self, stepSpec = 'validation'):
    """Enrich the schedule with (pre)validation paths.

    stepSpec may be '<preval>,<val>', an '@alias' list, or a plain postfix;
    prevalidation sequences become cms.Path, validation sequences cms.EndPath.
    """
    print(f"{stepSpec} in preparing validation")
    _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
    from Validation.Configuration.autoValidation import autoValidation
    #in case VALIDATION:something:somethingelse -> something,somethingelse
    if sequence.find(',')!=-1:
        prevalSeqName=sequence.split(',')[0].split('+')
        valSeqName=sequence.split(',')[1].split('+')
        self.expandMapping(prevalSeqName,autoValidation,index=0)
        self.expandMapping(valSeqName,autoValidation,index=1)
    else:
        if '@' in sequence:
            # a single '@alias' expands to both prevalidation and validation lists
            prevalSeqName=sequence.split('+')
            valSeqName=sequence.split('+')
            self.expandMapping(prevalSeqName,autoValidation,index=0)
            self.expandMapping(valSeqName,autoValidation,index=1)
        else:
            # plain name: derive 'prevalidation_<seq>' / 'validation_<seq>'
            postfix=''
            if sequence:
                postfix='_'+sequence
            prevalSeqName=['prevalidation'+postfix]
            valSeqName=['validation'+postfix]
            # fall back to the raw sequence name when no such pair exists
            if not hasattr(self.process,valSeqName[0]):
                prevalSeqName=['']
                valSeqName=[sequence]

    def NFI(index):
        # name-from-index: first path gets no suffix, later ones the index
        if index==0:
            return ''
        else:
            return '%s'%index

    #rename the HLT process in validation steps
    if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
        for s in valSeqName+prevalSeqName:
            if s:
                # restored: this call was dropped from the rendered source
                self.renameHLTprocessInSequence(s)

    for (i,s) in enumerate(prevalSeqName):
        if s:
            setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
            self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

    for (i,s) in enumerate(valSeqName):
        setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
        self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

    #needed in case the miniAODValidation sequence is run starting from AODSIM
    if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
        return

    if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
        if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
            self._options.restoreRNDSeeds=True

    # validation run on pre-mixed input: switch mixing to playback mode
    if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
        self.executeAndRemember("process.mix.playback = True")
        self.executeAndRemember("process.mix.digitizers = cms.PSet()")
        self.executeAndRemember("for a in process.aliases: delattr(process, a)")
        self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

    if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
        #will get in the schedule, smoothly
        for (i,s) in enumerate(valSeqName):
            getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)

    return
1980 
1981 
1983  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1984  It will climb down within PSets, VPSets and VInputTags to find its target"""
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
    # process name to search for and its replacement
    self._paramReplace = paramReplace
    self._paramSearch = paramSearch
    # when True, every replacement is printed
    self._verbose = verbose
    # parameter names that must never be touched
    self._whitelist = whitelist
def doIt(self, pset, base):
    """Recursively scan *pset* and replace every process-name occurrence of
    the search string; *base* is the dotted path used for verbose printout."""
    if isinstance(pset, cms._Parameterizable):
        for name in pset.parameters_().keys():
            # skip whitelisted parameters
            if name in self._whitelist:
                continue
            # if I use pset.parameters_().items() I get copies of the parameter values
            # so I can't modify the nested pset
            value = getattr(pset, name)
            valueType = type(value)
            if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
                # recurse into nested parameter sets
                self.doIt(value,base+"."+name)
            elif valueType in [cms.VPSet, cms.untracked.VPSet]:
                # recurse into every element of a vector of PSets
                for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
            elif valueType in [cms.string, cms.untracked.string]:
                # plain strings equal to the search value are replaced wholesale
                if value.value() == self._paramSearch:
                    if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
                    setattr(pset, name,self._paramReplace)
            elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
                for (i,n) in enumerate(value):
                    if not isinstance(n, cms.InputTag):
                        n=cms.InputTag(n)
                    if n.processName == self._paramSearch:
                        # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                        if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
                        setattr(n,"processName",self._paramReplace)
                        value[i]=n
            elif valueType in [cms.vstring, cms.untracked.vstring]:
                # replace matching entries of string vectors in place
                for (i,n) in enumerate(value):
                    if n==self._paramSearch:
                        getattr(pset,name)[i]=self._paramReplace
            elif valueType in [cms.InputTag, cms.untracked.InputTag]:
                # single InputTag: only the processName field is replaced
                if value.processName == self._paramSearch:
                    if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
                    setattr(getattr(pset, name),"processName",self._paramReplace)
2026 
def enter(self,visitee):
    """Visitor hook: derive a printable label for *visitee* and scan its
    parameters via doIt."""
    label = ''
    try:
        label = visitee.label()
    except AttributeError:
        # modules not attached to a Process carry no label
        label = '<Module not in a Process>'
    except:
        # any other failure: keep a generic label (fixed typo: 'execption')
        label = 'other exception'
    self.doIt(visitee, label)
2036 
def leave(self,visitee):
    """Visitor hook: nothing to do when leaving a node."""
    return None
2039 
#visit a sequence to replace all input tags
def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
    """Replace every InputTag *oldT* by *newT* inside *sequence*, both in the
    live process and in the dumped configuration."""
    print("Replacing all InputTag %s => %s"%(oldT,newT))
    from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
    massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
    # record the same replacement so it is replayed by the dumped python config
    loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
    if not loadMe in self.additionalCommands:
        self.additionalCommands.append(loadMe)
    self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2049 
#change the process name used to address HLT results in any sequence
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
    """Rename the process addressed for HLT results in *sequence* from
    *HLTprocess* to *proc* (defaults to the configured hltProcess or the
    current process name); no-op when the two names already match."""
    if proc == None:
        proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
    if proc == HLTprocess:
        return
    # look up all module in sequence
    if verbosityLevel > 0:
        print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
    verboseVisit = (verbosityLevel > 1)
    getattr(self.process,sequence).visit(
        ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
    # record the same replacement for the dumped configuration; the append()
    # wrapper around this command was dropped from the rendered source and is
    # restored here (the import is queued just above precisely for this call)
    if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
        self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
    self.additionalCommands.append(
        'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
        % (sequence, HLTprocess, proc, verboseVisit))
2067 
def expandMapping(self, seqList, mapping, index=None):
    """Expand every '@alias' entry of *seqList* in place using *mapping*.

    Each alias is replaced by its mapped '+'-separated expansion (picking
    element *index* of the mapped value when index is given); expansion is
    repeated so aliases may map to further aliases, up to a fixed depth.

    Raises:
        Exception: for an unknown alias, or if expansion does not terminate.
    """
    maxLevel = 30
    level = 0
    while '@' in repr(seqList) and level < maxLevel:
        level += 1
        # expand the first '@' entry found, then rescan from the start
        for entry in seqList:
            if not entry.startswith('@'):
                continue
            alias = entry[1:]
            if not alias in mapping:
                raise Exception("Impossible to map "+alias+" from "+repr(mapping))
            expansion = mapping[alias]
            if index != None:
                expansion = expansion[index]
            seqList.remove(entry)
            seqList.extend(expansion.split('+'))
            break
        if level == maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2086 
def prepare_DQM(self, stepSpec = 'DQMOffline'):
    """Enrich the schedule with offline DQM paths (and their post-PAT twins)."""
    # this one needs replacement

    # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
    self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
    _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
    # index 0 of autoDQM entries: main DQM sequences; index 1: post-PAT ones
    sequenceList=_dqmSeq.split('+')
    postSequenceList=_dqmSeq.split('+')
    from DQMOffline.Configuration.autoDQM import autoDQM
    self.expandMapping(sequenceList,autoDQM,index=0)
    self.expandMapping(postSequenceList,autoDQM,index=1)

    # drop duplicates while keeping the first-seen ordering
    if len(set(sequenceList))!=len(sequenceList):
        sequenceList=list(OrderedSet(sequenceList))
        print("Duplicate entries for DQM:, using",sequenceList)

    pathName='dqmoffline_step'
    for (i,_sequence) in enumerate(sequenceList):
        if (i!=0):
            pathName='dqmoffline_%d_step'%(i)

        # re-address HLT results if the HLT process was (re)run or renamed
        if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
            self.renameHLTprocessInSequence(_sequence)

        setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
        self.schedule.append(getattr(self.process,pathName))

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            #will get in the schedule, smoothly
            getattr(self.process,pathName).insert(0,self.process.genstepfilter)


    pathName='dqmofflineOnPAT_step'
    for (i,_sequence) in enumerate(postSequenceList):
        #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
        if (sequenceList[i]==postSequenceList[i]):
            continue
        if (i!=0):
            pathName='dqmofflineOnPAT_%d_step'%(i)

        setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
        self.schedule.append(getattr(self.process,pathName))
2129 
def prepare_HARVESTING(self, stepSpec = None):
    """ Enrich the process with harvesting step """
    self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
    # NOTE(review): DQMSaverCFF is assigned but this rendering shows no
    # corresponding loadAndRemember(self.DQMSaverCFF) call - confirm upstream.

    harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)

    # decide which HARVESTING paths to use
    harvestingList = sequence.split("+")
    from DQMOffline.Configuration.autoDQM import autoDQM
    from Validation.Configuration.autoValidation import autoValidation
    import copy
    # aliases may come from either the DQM or the validation mapping
    combined_mapping = copy.deepcopy( autoDQM )
    combined_mapping.update( autoValidation )
    self.expandMapping(harvestingList,combined_mapping,index=-1)

    # drop duplicates while keeping the first-seen ordering
    if len(set(harvestingList))!=len(harvestingList):
        harvestingList=list(OrderedSet(harvestingList))
        print("Duplicate entries for HARVESTING, using",harvestingList)

    for name in harvestingList:
        if not name in harvestingConfig.__dict__:
            print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
            # trigger hard error, like for other sequence types
            getattr(self.process, name)
            continue
        harvestingstream = getattr(harvestingConfig,name)
        # ready-made Paths are scheduled as-is (and kept out of the dump)
        if isinstance(harvestingstream,cms.Path):
            self.schedule.append(harvestingstream)
            self.blacklist_paths.append(harvestingstream)
        # Sequences get wrapped into a '<name>_step' Path first
        if isinstance(harvestingstream,cms.Sequence):
            setattr(self.process,name+"_step",cms.Path(harvestingstream))
            self.schedule.append(getattr(self.process,name+"_step"))

    # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
    # if self._options.hltProcess or ('HLT' in self.stepMap):
    #     pass

    self.scheduleSequence('DQMSaver','dqmsave_step')
    return
2170 
def prepare_ALCAHARVEST(self, stepSpec = None):
    """ Enrich the process with AlCaHarvesting step """
    harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
    sequence=stepSpec.split(".")[-1]

    # decide which AlcaHARVESTING paths to use
    harvestingList = sequence.split("+")



    # expand '@'-aliases via the PCL auto mapping
    from Configuration.AlCa.autoPCL import autoPCL
    self.expandMapping(harvestingList,autoPCL)

    for name in harvestingConfig.__dict__:
        harvestingstream = getattr(harvestingConfig,name)
        if name in harvestingList and isinstance(harvestingstream,cms.Path):
            self.schedule.append(harvestingstream)
            # VPSet outputs/metadata are extended onto the DB output service;
            # single PSets are appended instead
            if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
               isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
            else:
                self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
            harvestingList.remove(name)
    # append the common part at the end of the sequence
    lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
    self.schedule.append(lastStep)

    # anything left in the list was requested but not defined in the config
    if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
        print("The following harvesting could not be found : ", harvestingList)
        raise Exception("The following harvesting could not be found : "+str(harvestingList))
2203 
2204 
2205 
2206  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2207  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2208  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2209  return
2210 
2211  def finalizeFastSimHLT(self):
2212  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2213  self.schedule.append(self.process.reconstruction)
2214 
2215 
2216  def build_production_info(self, evt_type, evtnumber):
2217  """ Add useful info for the production. """
2218  self.process.configurationMetadata=cms.untracked.PSet\
2219  (version=cms.untracked.string("$Revision: 1.19 $"),
2220  name=cms.untracked.string("Applications"),
2221  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2222  )
2223 
2224  self.addedObjects.append(("Production Info","configurationMetadata"))
2225 
2226 
2227  def create_process(self):
2228  self.pythonCfgCode = "# Auto generated configuration file\n"
2229  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2230  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2231  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2232 
2233  # now set up the modifies
2234  modifiers=[]
2235  modifierStrings=[]
2236  modifierImports=[]
2237 
2238  if hasattr(self._options,"era") and self._options.era :
2239  # Multiple eras can be specified in a comma seperated list
2240  from Configuration.StandardSequences.Eras import eras
2241  for requestedEra in self._options.era.split(",") :
2242  modifierStrings.append(requestedEra)
2243  modifierImports.append(eras.pythonCfgLines[requestedEra])
2244  modifiers.append(getattr(eras,requestedEra))
2245 
2246 
2247  if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2248  import importlib
2249  thingsImported=[]
2250  for c in self._options.procModifiers:
2251  thingsImported.extend(c.split(","))
2252  for pm in thingsImported:
2253  modifierStrings.append(pm)
2254  modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2255  modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2256 
2257  self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2258  self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop
2259 
2260 
2261  if len(modifierStrings)>0:
2262  self.pythonCfgCode+= ','+','.join(modifierStrings)
2263  self.pythonCfgCode+=')\n\n'
2264 
2265  #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
2266  #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
2267  if self.process == None:
2268  if len(modifiers)>0:
2269  self.process = cms.Process(self._options.name,*modifiers)
2270  else:
2271  self.process = cms.Process(self._options.name)
2272 
2273 
2274 
2275 
2276  def prepare(self, doChecking = False):
2277  """ Prepare the configuration string and add missing pieces."""
2278 
2279  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2280  self.addMaxEvents()
2281  if self.with_input:
2282  self.addSource()
2283  self.addStandardSequences()
2284 
2285  self.completeInputCommand()
2286  self.addConditions()
2287 
2288 
2289  outputModuleCfgCode=""
2290  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2291  outputModuleCfgCode=self.addOutput()
2292 
2293  self.addCommon()
2294 
2295  self.pythonCfgCode += "# import of standard configurations\n"
2296  for module in self.imports:
2297  self.pythonCfgCode += ("process.load('"+module+"')\n")
2298 
2299  # production info
2300  if not hasattr(self.process,"configurationMetadata"):
2301  self.build_production_info(self._options.evt_type, self._options.number)
2302  else:
2303  #the PSet was added via a load
2304  self.addedObjects.append(("Production Info","configurationMetadata"))
2305 
2306  self.pythonCfgCode +="\n"
2307  for comment,object in self.addedObjects:
2308  if comment!="":
2309  self.pythonCfgCode += "\n# "+comment+"\n"
2310  self.pythonCfgCode += dumpPython(self.process,object)
2311 
2312  # dump the output definition
2313  self.pythonCfgCode += "\n# Output definition\n"
2314  self.pythonCfgCode += outputModuleCfgCode
2315 
2316  # dump all additional outputs (e.g. alca or skim streams)
2317  self.pythonCfgCode += "\n# Additional output definition\n"
2318  #I do not understand why the keys are not normally ordered.
2319  nl=sorted(self.additionalOutputs.keys())
2320  for name in nl:
2321  output = self.additionalOutputs[name]
2322  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2323  tmpOut = cms.EndPath(output)
2324  setattr(self.process,name+'OutPath',tmpOut)
2325  self.schedule.append(tmpOut)
2326 
2327  # dump all additional commands
2328  self.pythonCfgCode += "\n# Other statements\n"
2329  for command in self.additionalCommands:
2330  self.pythonCfgCode += command + "\n"
2331 
2332  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2333  for object in self._options.inlineObjects.split(','):
2334  if not object:
2335  continue
2336  if not hasattr(self.process,object):
2337  print('cannot inline -'+object+'- : not known')
2338  else:
2339  self.pythonCfgCode +='\n'
2340  self.pythonCfgCode +=dumpPython(self.process,object)
2341 
2342  if self._options.pileup=='HiMixEmbGEN':
2343  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2344 
2345  # dump all paths
2346  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2347  for path in self.process.paths:
2348  if getattr(self.process,path) not in self.blacklist_paths:
2349  self.pythonCfgCode += dumpPython(self.process,path)
2350 
2351  for endpath in self.process.endpaths:
2352  if getattr(self.process,endpath) not in self.blacklist_paths:
2353  self.pythonCfgCode += dumpPython(self.process,endpath)
2354 
2355  # dump the schedule
2356  self.pythonCfgCode += "\n# Schedule definition\n"
2357 
2358  # handling of the schedule
2359  pathNames = ['process.'+p.label_() for p in self.schedule]
2360  if self.process.schedule == None:
2361  self.process.schedule = cms.Schedule()
2362  for item in self.schedule:
2363  self.process.schedule.append(item)
2364  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2365  else:
2366  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2367  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2368 
2369  for index, item in enumerate(self.schedule):
2370  if index < self.scheduleIndexOfFirstHLTPath:
2371  self.process.schedule.insert(index, item)
2372  else:
2373  self.process.schedule.append(item)
2374 
2375  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2376  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2377  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2378  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2379  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2380 
2381  self.pythonCfgCode += result
2382 
2383  for labelToAssociate in self.labelsToAssociate:
2384  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2385  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2386 
2387  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2389  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2390  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2391 
2392  overrideThreads = (self._options.nThreads != 1)
2393  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2394  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2395 
2396  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2397  self.pythonCfgCode +="\n"
2398  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2399  if overrideThreads:
2400  self.pythonCfgCode +="process.options.numberOfThreads = {}\n".format(self._options.nThreads)
2401  self.pythonCfgCode +="process.options.numberOfStreams = {}\n".format(self._options.nStreams)
2402  self.process.options.numberOfThreads = self._options.nThreads
2403  self.process.options.numberOfStreams = self._options.nStreams
2404  if overrideConcurrentLumis:
2405  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = {}\n".format(self._options.nConcurrentLumis)
2406  self.process.options.numberOfConcurrentLuminosityBlocks = self._options.nConcurrentLumis
2407  if overrideConcurrentIOVs:
2408  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = {}\n".format(self._options.nConcurrentIOVs)
2409  self.process.options.eventSetup.numberOfConcurrentIOVs = self._options.nConcurrentIOVs
2410 
2411  if self._options.accelerators is not None:
2412  accelerators = self._options.accelerators.split(',')
2413  self.pythonCfgCode += "\n"
2414  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2415  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2416  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2417  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2418  self.process.options.accelerators = accelerators
2419 
2420  #repacked version
2421  if self._options.isRepacked:
2422  self.pythonCfgCode +="\n"
2423  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2424  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2425  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2426 
2427  # special treatment in case of production filter sequence 2/2
2428  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2429  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2430  self.pythonCfgCode +='for path in process.paths:\n'
2431  if len(self.conditionalPaths):
2432  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2433  if len(self.excludedPaths):
2434  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2435  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2436  pfs = getattr(self.process,self.productionFilterSequence)
2437  for path in self.process.paths:
2438  if not path in self.conditionalPaths: continue
2439  if path in self.excludedPaths: continue
2440  getattr(self.process,path).insert(0, pfs)
2441 
2442 
2443  # dump customise fragment
2444  self.pythonCfgCode += self.addCustomise()
2445 
2446  if self._options.runUnscheduled:
2447  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2448  # Keep the "unscheduled customise functions" separate for now,
2449  # there are customize functions given by users (in our unit
2450  # tests) that need to be run before the "unscheduled customise
2451  # functions"
2452  self.pythonCfgCode += self.addCustomise(1)
2453 
2454  self.pythonCfgCode += self.addCustomiseCmdLine()
2455 
2456  if hasattr(self.process,"logErrorHarvester"):
2457  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2458  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2459  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2460  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2461  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2463 
2464  # Temporary hack to put the early delete customization after
2465  # everything else
2466  #
2467  # FIXME: remove when no longer needed
2468  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2469  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2470  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2471  self.pythonCfgCode += "# End adding early deletion\n"
2472  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2473  self.process = customiseEarlyDelete(self.process)
2474 
2475  imports = cms.specialImportRegistry.getSpecialImports()
2476  if len(imports) > 0:
2477  #need to inject this at the top
2478  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2479  #now find the end of line
2480  index = self.pythonCfgCode.find("\n",index)
2481  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2482 
2483 
2484  # make the .io file
2485 
2486  if self._options.io:
2487  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2488  if not self._options.io.endswith('.io'): self._option.io+='.io'
2489  io=open(self._options.io,'w')
2490  ioJson={}
2491  if hasattr(self.process.source,"fileNames"):
2492  if len(self.process.source.fileNames.value()):
2493  ioJson['primary']=self.process.source.fileNames.value()
2494  if hasattr(self.process.source,"secondaryFileNames"):
2495  if len(self.process.source.secondaryFileNames.value()):
2496  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2497  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2498  ioJson['pileup']=self._options.pileup_input[4:]
2499  for (o,om) in self.process.outputModules_().items():
2500  ioJson[o]=om.fileName.value()
2501  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2502  if self.productionFilterSequence:
2503  ioJson['filter']=self.productionFilterSequence
2504  import json
2505  io.write(json.dumps(ioJson))
2506  return
2507 
2508 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
ALPAKA_FN_HOST_ACC ALPAKA_FN_INLINE constexpr float zip(ConstView const &tracks, int32_t i)
Definition: TracksSoA.h:90
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
Definition: autoNANO.py:1
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_L1P2GT(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:21
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:210
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule