CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 __version__ = "$Revision: 1.19 $"
4 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
5 
6 import FWCore.ParameterSet.Config as cms
7 from FWCore.ParameterSet.Modules import _Module
8 # The following import is provided for backward compatibility reasons.
9 # The function used to be defined in this file.
10 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
11 
12 import hashlib
13 import sys
14 import re
15 import collections
16 from subprocess import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes as DictTypes
18 from FWCore.ParameterSet.OrderedSet import OrderedSet
class Options:
    """Empty attribute container: cmsDriver option values are attached to
    instances of this class as plain attributes (see ``defaultOptions``)."""
    pass
21 
# the canonical defaults
defaultOptions = Options()
vars(defaultOptions).update(
    # sample type and processing steps
    datamix='DataOnSim',
    isMC=False,
    isData=True,
    step='',
    # pile-up configuration
    pileup='NoPileUp',
    pileup_input=None,
    pileup_dasoption='',
    # geometry / field / conditions
    geometry='SimDB',
    geometryExtendedOptions=['ExtendedGFlash', 'Extended', 'NoCastor'],
    magField='',
    conditions=None,
    scenarioOptions=['pp', 'cosmics', 'nocoll', 'HeavyIons'],
    harvesting='AtRunEnd',
    gflash=False,
    # event counts and job identification
    number=-1,
    number_out=None,
    arguments="",
    name="NO NAME GIVEN",
    evt_type="",
    # input definition
    filein="",
    dasquery="",
    dasoption="",
    secondfilein="",
    # user customisation hooks
    customisation_file=[],
    customisation_file_unsch=[],
    customise_commands="",
    inline_custom=False,
    # particle data table
    particleTable='pythiapdt',
    particleTableList=['pythiapdt', 'pdt'],
    # I/O locations and formats
    dirin='',
    dirout='',
    filetype='EDM',
    fileout='output.root',
    filtername='',
    lazy_download=False,
    custom_conditions='',
    hltProcess='',
    # output content definition
    eventcontent=None,
    datatier=None,
    inlineEventContent=True,
    inlineObjects='',
    hideGen=False,
)
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
vars(defaultOptions).update(
    beamspot=None,
    outputDefinition='',
    inputCommands=None,
    outputCommands=None,
    inputEventContent='',
    dropDescendant=False,
    relval=None,
    prefix=None,
    # profiling switches
    profile=None,
    heap_profile=None,
    maxmem_profile=None,
    isRepacked=False,
    restoreRNDSeeds=False,
    donotDropOnInput='',
    python_filename='',
    io=None,
    lumiToProcess=None,
    fast=False,
    # run-dependent MC configuration
    runsAndWeightsForMC=None,
    runsScenarioForMC=None,
    runsAndWeightsForMCIntegerWeights=None,
    runsScenarioForMCIntegerWeights=None,
    runUnscheduled=False,
    timeoutOutput=False,
    # concurrency defaults
    nThreads=1,
    nStreams=0,
    nConcurrentLumis=0,
    nConcurrentIOVs=0,
    accelerators=None,
)
96 
97 # some helper routines
def dumpPython(process, name):
    """Return python source that re-creates the attribute *name* of *process*.

    Paths, EndPaths and Sequences are dumped without a trailing newline;
    everything else (modules, ESProducers, ...) gets one appended.
    """
    theObject = getattr(process, name)
    if isinstance(theObject, (cms.Path, cms.EndPath, cms.Sequence)):
        return "process." + name + " = " + theObject.dumpPython()
    # the original separate branches for _Module/ESProducer and the generic
    # fallback were byte-identical, so they are collapsed into one
    return "process." + name + " = " + theObject.dumpPython() + "\n"
def filesFromList(fileName, s=None):
    """Read a text file listing input ROOT files and return ``(prim, sec)``.

    Each line holds either one file name (primary only) or two (primary plus
    its parent/secondary file).  Lines without ``.root`` are ignored.  When a
    source object *s* is given, the lists are also attached/appended to its
    ``fileNames`` / ``secondaryFileNames`` parameters.

    Raises an Exception when no primary file is found.
    """
    prim = []
    sec = []
    # use a context manager so the file handle is closed deterministically
    with open(fileName, 'r') as listFile:
        for line in listFile:
            if line.count(".root") >= 2:
                # two files solution: primary plus its parent file
                entries = line.replace("\n", "").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif line.find(".root") != -1:
                prim.append(line.replace("\n", ""))
    # remove any duplicates but keep the order (dicts preserve insertion order)
    prim = list(dict.fromkeys(prim))
    sec = list(dict.fromkeys(sec))
    if s:
        # imported lazily: only needed when filling a cms.Source object
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s, "fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec) != 0:
            if not hasattr(s, "secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ", prim)
    if len(prim) == 0:
        raise Exception("There are no files in input from the file list")
    if len(sec) != 0:
        print("found parent files:", sec)
    return (prim, sec)
141 
def filesFromDASQuery(query, option="", s=None):
    """Run *query* through ``dasgoclient`` and return ``(prim, sec)``.

    The client is retried up to 3 times (sleeping between attempts).  The
    resulting primary / secondary (parent) file lists are de-duplicated and
    sorted; when a source object *s* is given they are also attached/appended
    to its ``fileNames`` / ``secondaryFileNames`` parameters.
    """
    import os, time
    prim = []
    sec = []
    print("the query is", query)
    eC = 5
    count = 0
    # retry loop: exit on success (exit code 0) or after 3 attempts
    while eC != 0 and count < 3:
        if count != 0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        # NOTE: shell=True with an interpolated query string; the query comes
        # from the operator's command line, not from untrusted input
        p = Popen('dasgoclient %s --query "%s"' % (option, query), stdout=PIPE, shell=True, universal_newlines=True)
        pipe = p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC = tupleP[1]
        count += 1
    if eC == 0:
        print("DAS succeeded after", count, "attempts", eC)
    else:
        print("DAS failed 3 times- I give up")
    # even on failure, parse whatever the last attempt printed
    for line in pipe.split('\n'):
        if line.count(".root") >= 2:
            # two files solution: primary plus its parent file
            entries = line.replace("\n", "").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif line.find(".root") != -1:
            prim.append(line.replace("\n", ""))
    # remove any duplicates
    prim = sorted(set(prim))
    sec = sorted(set(sec))
    if s:
        # imported lazily: only needed when filling a cms.Source object
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s, "fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec) != 0:
            if not hasattr(s, "secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ", prim)
    if len(sec) != 0:
        print("found parent files:", sec)
    return (prim, sec)
189 
def anyOf(listOfKeys, dict, opt=None):
    """Pop and return the value of the first key from *listOfKeys* present in *dict*.

    If none of the keys is present, return *opt* when given, otherwise raise.
    NOTE: the parameter name 'dict' shadows the builtin but is kept for
    backward compatibility with existing callers.
    """
    for k in listOfKeys:
        if k in dict:
            # single pop instead of the original lookup-then-pop pair
            return dict.pop(k)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
200 
202  """The main building routines """
203 
    def __init__(self, options, process = None, with_output = False, with_input = False ):
        """options taken from old cmsDriver and optparse """

        # full output file name = output directory + file name
        options.outfile_name = options.dirout+options.fileout

        self._options = options

        # --data and --mc are mutually exclusive
        if self._options.isData and options.isMC:
            raise Exception("ERROR: You may specify only --data or --mc, not both")
        #if not self._options.conditions:
        #        raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")

        # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
        # NOTE(review): eval() on the --output option string; value comes from
        # the operator's command line, not untrusted input
        if 'ENDJOB' in self._options.step:
            if (hasattr(self._options,"outputDefinition") and \
                self._options.outputDefinition != '' and \
                any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
               (hasattr(self._options,"datatier") and \
                self._options.datatier and \
                'DQMIO' in self._options.datatier):
                print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
                self._options.step=self._options.step.replace(',ENDJOB','')

        # what steps are provided by this class?
        # every prepare_<STEP> method of this class defines an available step
        stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
        self.stepMap={}
        self.stepKeys=[]
        # each --step entry is NAME, NAME:spec, or NAME:seq:spec; the parsed
        # result is stored as "", a list of specs, or a (specs, seq) tuple
        for step in self._options.step.split(","):
            if step=='': continue
            stepParts = step.split(":")
            stepName = stepParts[0]
            # names starting with 're' (re-running a step) bypass the check
            if stepName not in stepList and not stepName.startswith('re'):
                raise ValueError("Step {} unknown. Available are {}".format( stepName , sorted(stepList)))
            if len(stepParts)==1:
                self.stepMap[stepName]=""
            elif len(stepParts)==2:
                self.stepMap[stepName]=stepParts[1].split('+')
            elif len(stepParts)==3:
                self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
            else:
                raise ValueError(f"Step definition {step} invalid")
            self.stepKeys.append(stepName)

        #print(f"map of steps is: {self.stepMap}")

        self.with_output = with_output
        self.process=process

        # --no_output overrides the with_output constructor argument
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        self.imports = []
        self.create_process()
        self.define_Configs()
        self.schedule = list()

        # we are doing three things here:
        # creating a process to catch errors
        # building the code to re-create the process

        # TODO: maybe a list of to be dumped objects would help as well
        self.blacklist_paths = []
        self.addedObjects = []
279  def profileOptions(self):
280  """
281  addIgProfService
282  Function to add the igprof profile service so that you can dump in the middle
283  of the run.
284  """
285  profileOpts = self._options.profile.split(':')
286  profilerStart = 1
287  profilerInterval = 100
288  profilerFormat = None
289  profilerJobFormat = None
290 
291  if len(profileOpts):
292  #type, given as first argument is unused here
293  profileOpts.pop(0)
294  if len(profileOpts):
295  startEvent = profileOpts.pop(0)
296  if not startEvent.isdigit():
297  raise Exception("%s is not a number" % startEvent)
298  profilerStart = int(startEvent)
299  if len(profileOpts):
300  eventInterval = profileOpts.pop(0)
301  if not eventInterval.isdigit():
302  raise Exception("%s is not a number" % eventInterval)
303  profilerInterval = int(eventInterval)
304  if len(profileOpts):
305  profilerFormat = profileOpts.pop(0)
306 
307 
308  if not profilerFormat:
309  profilerFormat = "%s___%s___%%I.gz" % (
310  self._options.evt_type.replace("_cfi", ""),
311  hashlib.md5(
312  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
313  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
314  ).hexdigest()
315  )
316  if not profilerJobFormat and profilerFormat.endswith(".gz"):
317  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
318  elif not profilerJobFormat:
319  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
320 
321  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
322 
324  """
325  addJeProfService
326  Function to add the jemalloc heap profile service so that you can dump in the middle
327  of the run.
328  """
329  profileOpts = []
330  profilerStart = 1
331  profilerInterval = 100
332  profilerFormat = "jeprof_%s.heap"
333  profilerJobFormat = None
334 
335 
336  if not profilerJobFormat and profilerFormat.endswith(".heap"):
337  profilerJobFormat = profilerFormat.replace(".heap", "_EndOfJob.heap")
338  elif not profilerJobFormat:
339  profilerJobFormat = profilerFormat + "_EndOfJob.heap"
340 
341  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
342 
343  def load(self,includeFile):
344  includeFile = includeFile.replace('/','.')
345  self.process.load(includeFile)
346  return sys.modules[includeFile]
347 
348  def loadAndRemember(self, includeFile):
349  """helper routine to load am memorize imports"""
350  # we could make the imports a on-the-fly data method of the process instance itself
351  # not sure if the latter is a good idea
352  includeFile = includeFile.replace('/','.')
353  self.imports.append(includeFile)
354  self.process.load(includeFile)
355  return sys.modules[includeFile]
356 
    def executeAndRemember(self, command):
        """helper routine to remember replace statements"""
        # keep the literal command text in additionalCommands for later use
        self.additionalCommands.append(command)
        # pure comment commands are recorded but not executed
        if not command.strip().startswith("#"):
            # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
            # the regex keeps the surrounding non-identifier characters so that
            # e.g. "myprocess" is not rewritten; the rewritten command is then
            # executed immediately against self.process
            import re
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
            #exec(command.replace("process.","self.process."))
365 
366  def addCommon(self):
367  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
368  self.process.options.Rethrow = ['ProductNotFound']
369  self.process.options.fileMode = 'FULLMERGE'
370 
371  self.addedObjects.append(("","options"))
372 
373  if self._options.lazy_download:
374  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
375  stats = cms.untracked.bool(True),
376  enable = cms.untracked.bool(True),
377  cacheHint = cms.untracked.string("lazy-download"),
378  readHint = cms.untracked.string("read-ahead-buffered")
379  )
380  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
381 
382  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
383  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
384 
385  if self._options.profile:
386  (start, interval, eventFormat, jobFormat)=self.profileOptions()
387  self.process.IgProfService = cms.Service("IgProfService",
388  reportFirstEvent = cms.untracked.int32(start),
389  reportEventInterval = cms.untracked.int32(interval),
390  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
391  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
392  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
393 
394  if self._options.heap_profile:
395  (start, interval, eventFormat, jobFormat)=self.heapProfileOptions()
396  self.process.JeProfService = cms.Service("JeProfService",
397  reportFirstEvent = cms.untracked.int32(start),
398  reportEventInterval = cms.untracked.int32(interval),
399  reportToFileAtPostEvent = cms.untracked.string("%s"%(eventFormat)),
400  reportToFileAtPostEndJob = cms.untracked.string("%s"%(jobFormat)))
401  self.addedObjects.append(("Setup JeProf Service for heap profiling","JeProfService"))
402 
403  def addMaxEvents(self):
404  """Here we decide how many evts will be processed"""
405  self.process.maxEvents.input = self._options.number
406  if self._options.number_out:
407  self.process.maxEvents.output = self._options.number_out
408  self.addedObjects.append(("","maxEvents"))
409 
    def addSource(self):
        """Here the source is built. Priority: file, generator

        Configures self.process.source from --filein/--dasquery according to
        --filetype (EDM/DAT/LHE/DQM/DQMDAQ), applies input commands and lumi
        masks, and sets up run-dependent MC source modifications.
        """
        self.addedObjects.append(("Input source","source"))

        # local helper (called as filesFromOption(self), not a bound method):
        # fills process.source.fileNames/secondaryFileNames from --filein and
        # --secondfilein, each entry being filelist:, dbs:/das:, or a file name
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print("entry",entry)
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        # choose the source C++ type from the input file type
        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print('LHE input from article ',article)
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                        #check first if list of LHE files is loaded (not empty)
                        if len(line)<2:
                            print('Issue to load LHE files, please check and try again.')
                            sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print('Issue with empty filename, but can pass line check')
                        sys.exit(-1)
                    # lhe:<article>:<skip> syntax also sets skipEvents
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)

            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)

            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")


            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # a DAS query replaces the source built from --filein
        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # drop the LHE product on input when running GEN on non-LHE input
        if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        # optional lumi mask (JSON) restricting which lumi sections are read
        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        # generator-style jobs get an EmptySource when nothing was configured
        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                # NOTE(review): eval() of a command-line-provided dict literal
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                # a string entry names a module providing the distribution
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            # NOTE(review): a direct call ThrowAndSetRandomRun.throwAndSetRandomRun(
            # self.process.source, self.runsAndWeights) is expected here but is
            # elided in this view — confirm against the full file
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        # modify source in case of run-dependent MC (Run-3 method)
        # NOTE(review): self.runsAndWeightsInt is read unconditionally below, so
        # an initialization to None is expected here (elided in this view)
        if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
            if self._options.runsAndWeightsForMCIntegerWeights:
                self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                    self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
                else:
                    self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

        if self.runsAndWeightsInt:
            if not self._options.relval:
                raise Exception("--relval option required when using --runsAndWeightsInt")
            if 'DATAMIX' in self._options.step:
                from SimGeneral.Configuration.LumiToRun import lumi_to_run
                total_events, events_per_job = self._options.relval.split(',')
                lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
                self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

        return
561 
    def addOutput(self):
        """ Add output module to the process

        Two modes: --output (a list of dicts, handled first and returning
        early) or the classic --eventcontent/--datatier comma-separated lists.
        Returns the python dump of the created output modules as a string.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print("--datatier & --eventcontent options ignored")

            #new output convention with a list of dict
            # NOTE(review): eval() of the command-line --output string
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role
                # when no label is given, try progressively more specific names
                # until one is free on the process
                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                    if not theModuleLabel:
                        raise Exception("cannot find a module label for specification: "+outDefDictStr)
                # first output keeps the nominal file name; later ones get a
                # per-tier suffix
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # anyOf() pops recognized keys, so leftovers are user typos
                if len(outDefDict):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName))
                                          )
                # default event selection when none was requested explicitly
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                # combined ALCARECO stream selects on all configured AlCa paths
                if addAlCaSelects:
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                # replace the dump of outputCommands by a reference to the
                # event-content object instead of inlining every statement
                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


            return result

        # classic mode: parallel --eventcontent / --datatier lists
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            # NANOEDM streams reuse the corresponding NANOAOD event content
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            # first output keeps the nominal file name; later ones get a
            # per-stream suffix
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                                                   )
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                # NOTE(review): a call miniAOD_customizeOutput(output) is
                # expected here but is elided in this view — confirm against
                # the full file

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            # replace the dump of outputCommands by a reference to the
            # event-content object instead of inlining every statement
            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
742 
743  def addStandardSequences(self):
744  """
745  Add selected standard sequences to the process
746  """
747  # load the pile up file
748  if self._options.pileup:
749  pileupSpec=self._options.pileup.split(',')[0]
750 
751  #make sure there is a set of pileup files specified when needed
752  pileups_without_input=[defaultOptions.pileup,"Cosmics","default","HiMixNoPU",None]
753  if self._options.pileup not in pileups_without_input and self._options.pileup_input==None:
754  message = "Pileup scenerio requires input files. Please add an appropriate --pileup_input option"
755  raise Exception(message)
756 
757  # Does the requested pile-up scenario exist?
758  from Configuration.StandardSequences.Mixing import Mixing,defineMixing
759  if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
760  message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
761  raise Exception(message)
762 
763  # Put mixing parameters in a dictionary
764  if '.' in pileupSpec:
765  mixingDict={'file':pileupSpec}
766  elif pileupSpec.startswith('file:'):
767  mixingDict={'file':pileupSpec[5:]}
768  else:
769  import copy
770  mixingDict=copy.copy(Mixing[pileupSpec])
771  if len(self._options.pileup.split(','))>1:
772  mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))
773 
774  # Load the pu cfg file corresponding to the requested pu scenario
775  if 'file:' in pileupSpec:
776  #the file is local
777  self.process.load(mixingDict['file'])
778  print("inlining mixing module configuration")
779  self._options.inlineObjects+=',mix'
780  else:
781  self.loadAndRemember(mixingDict['file'])
782 
783  mixingDict.pop('file')
784  if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
785  if self._options.pileup_input:
786  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
787  mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
788  elif self._options.pileup_input.startswith("filelist:"):
789  mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
790  else:
791  mixingDict['F']=self._options.pileup_input.split(',')
792  specialization=defineMixing(mixingDict)
793  for command in specialization:
794  self.executeAndRemember(command)
795  if len(mixingDict)!=0:
796  raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
797 
798 
799  # load the geometry file
800  try:
801  if len(self.stepMap):
802  self.loadAndRemember(self.GeometryCFF)
803  if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
805  if self.geometryDBLabel:
806  self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
807  self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))
808 
809  except ImportError:
810  print("Geometry option",self._options.geometry,"unknown.")
811  raise
812 
813  if len(self.stepMap):
814  self.loadAndRemember(self.magFieldCFF)
815 
816  for stepName in self.stepKeys:
817  stepSpec = self.stepMap[stepName]
818  print("Step:", stepName,"Spec:",stepSpec)
819  if stepName.startswith('re'):
820 
821  if stepName[2:] not in self._options.donotDropOnInput:
822  self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
823  stepName=stepName[2:]
824  if stepSpec=="":
825  getattr(self,"prepare_"+stepName)(stepSpec = getattr(self,stepName+"DefaultSeq"))
826  elif isinstance(stepSpec, list):
827  getattr(self,"prepare_"+stepName)(stepSpec = '+'.join(stepSpec))
828  elif isinstance(stepSpec, tuple):
829  getattr(self,"prepare_"+stepName)(stepSpec = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
830  else:
831  raise ValueError("Invalid step definition")
832 
833  if self._options.restoreRNDSeeds!=False:
834  #it is either True, or a process name
835  if self._options.restoreRNDSeeds==True:
836  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
837  else:
838  self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
839  if self._options.inputEventContent or self._options.inputCommands:
840  if self._options.inputCommands:
841  self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
842  else:
843  self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
844 
845 
def completeInputCommand(self):
    """Extend process.source.inputCommands with the output/input commands of
    every <name>EventContent PSet listed (comma-separated) in
    --inputEventContent. No-op when the option is empty."""
    if self._options.inputEventContent:
        import copy
        def dropSecondDropStar(iec):
            #drop occurence of 'drop *' in the list
            # NOTE(review): this mutates `iec` while iterating over it, and
            # list.remove() deletes the FIRST matching element; with several
            # 'drop *' entries the one ultimately kept is not necessarily the
            # first occurrence — confirm before relying on ordering.
            count=0
            for item in iec:
                if item=='drop *':
                    if count!=0:
                        iec.remove(item)
                    count+=1


        # make sure the source has an inputCommands list to extend
        if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
        for evct in self._options.inputEventContent.split(','):
            if evct=='': continue
            # look up the corresponding <name>EventContent PSet on the process
            theEventContent = getattr(self.process, evct+"EventContent")
            if hasattr(theEventContent,'outputCommands'):
                self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
            if hasattr(theEventContent,'inputCommands'):
                self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

        # several event contents may each contribute a 'drop *'; keep only one
        dropSecondDropStar(self.process.source.inputCommands)

        if not self._options.dropDescendant:
            self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)


    return
875 
876  def addConditions(self):
877  """Add conditions to the process"""
878  if not self._options.conditions: return
879 
880  if 'FrontierConditions_GlobalTag' in self._options.conditions:
881  print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
882  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
883 
885  from Configuration.AlCa.GlobalTag import GlobalTag
886  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
887  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
888  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
889 
890 
def addCustomise(self,unsch=0):
    """Include the customise code.

    unsch==0 uses --customisation_file entries, otherwise the
    --customisation_file_unsch ones. Each entry is 'file.func1+func2' (or
    just 'file', defaulting to function 'customise'). Every function is
    imported, applied to self.process, and a python snippet reproducing the
    calls is returned for the dumped configuration.
    Raises on malformed specs, duplicate functions, or missing functions.
    """

    custOpt=[]
    if unsch==0:
        for c in self._options.customisation_file:
            custOpt.extend(c.split(","))
    else:
        for c in self._options.customisation_file_unsch:
            custOpt.extend(c.split(","))

    # map: customisation file -> list of function names to apply
    custMap=DictTypes.SortedKeysDict()
    for opt in custOpt:
        if opt=='': continue
        if opt.count('.')>1:
            raise Exception("more than . in the specification:"+opt)
        fileName=opt.split('.')[0]
        if opt.count('.')==0: rest='customise'
        else:
            rest=opt.split('.')[1]
            if rest=='py': rest='customise' #catch the case of --customise file.py

        if fileName in custMap:
            custMap[fileName].extend(rest.split('+'))
        else:
            custMap[fileName]=rest.split('+')

    if len(custMap)==0:
        final_snippet='\n'
    else:
        final_snippet='\n# customisation of the process.\n'

    # a function may only be applied once over all customisation files
    allFcn=[]
    for opt in custMap:
        allFcn.extend(custMap[opt])
    for fcn in allFcn:
        if allFcn.count(fcn)!=1:
            raise Exception("cannot specify twice "+fcn+" as a customisation method")

    for f in custMap:
        # let python search for that package and do syntax checking at the same time
        packageName = f.replace(".py","").replace("/",".")
        __import__(packageName)
        package = sys.modules[packageName]

        # now ask the package for its definition and pick .py instead of .pyc
        customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

        final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
        if self._options.inline_custom:
            # BUGFIX: was `file(customiseFile,'r')` — the `file` builtin was
            # removed in Python 3; use open() in a context manager instead.
            with open(customiseFile,'r') as customiseFileHandle:
                for line in customiseFileHandle:
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
        else:
            final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
        for fcn in custMap[f]:
            print("customising the process with",fcn,"from",f)
            if not hasattr(package,fcn):
                #bound to fail at run time
                raise Exception("config "+f+" has no function "+fcn)
            #execute the command
            self.process=getattr(package,fcn)(self.process)
            #and print it in the configuration
            final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
            final_snippet += "\nprocess = %s(process)\n"%(fcn,)

    if len(custMap)!=0:
        final_snippet += '\n# End of customisation functions\n'


    return final_snippet
963 
def addCustomiseCmdLine(self):
    """Apply --customise_commands (literal '\\n'-separated python statements)
    to the process via executeAndRemember, and return the snippet to be
    appended to the dumped configuration."""
    final_snippet='\n# Customisation from command line\n'
    if self._options.customise_commands:
        # NOTE: removed an unused Python-2 leftover `import string`
        for com in self._options.customise_commands.split('\\n'):
            com=com.lstrip()
            self.executeAndRemember(com)
            final_snippet +='\n'+com

    return final_snippet
974 
975  #----------------------------------------------------------------------------
976  # here the methods to define the python includes for each step or
977  # conditions
978  #----------------------------------------------------------------------------
979  def define_Configs(self):
980  if len(self.stepMap):
981  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
982  if self._options.particleTable not in defaultOptions.particleTableList:
983  print('Invalid particle table provided. Options are:')
984  print(defaultOptions.particleTable)
985  sys.exit(-1)
986  else:
987  if len(self.stepMap):
988  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
989 
990  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
991 
992  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
993  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
994  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
995  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
996  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
997  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
998  self.L1P2GTDefaultCFF = 'Configuration/StandardSequences/SimPhase2L1GlobalTriggerEmulator_cff'
999  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
1000  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
1001  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
1002  if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
1003  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
1004  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
1005  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
1006  self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
1007  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
1008  self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
1009  self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
1010  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
1011  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
1012  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
1013  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
1014  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
1015  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
1016  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
1017  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
1018  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
1019  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
1020  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
1022  if "DATAMIX" in self.stepMap.keys():
1023  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
1024  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
1025  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
1026  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
1027 
1028  self.ALCADefaultSeq=None
1029  self.LHEDefaultSeq='externalLHEProducer'
1030  self.GENDefaultSeq='pgen'
1031  self.SIMDefaultSeq='psim'
1032  self.DIGIDefaultSeq='pdigi'
1034  self.DIGI2RAWDefaultSeq='DigiToRaw'
1035  self.HLTDefaultSeq='GRun'
1036  self.L1DefaultSeq=None
1042  self.RAW2DIGIDefaultSeq='RawToDigi'
1043  self.L1RecoDefaultSeq='L1Reco'
1044  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1045  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1046  self.RECODefaultSeq='reconstruction'
1047  else:
1048  self.RECODefaultSeq='reconstruction_fromRECO'
1049  self.RECOSIMDefaultSeq='recosim'
1051  self.L1HwValDefaultSeq='L1HwVal'
1052  self.DQMDefaultSeq='DQMOffline'
1054  self.ENDJOBDefaultSeq='endOfProcess'
1055  self.REPACKDefaultSeq='DigiToRawRepack'
1056  self.PATDefaultSeq='miniAOD'
1057  self.PATGENDefaultSeq='miniGEN'
1058  #TODO: Check based of file input
1059  self.NANOGENDefaultSeq='nanogenSequence'
1060  self.NANODefaultSeq='nanoSequence'
1061  self.NANODefaultCustom='nanoAOD_customizeCommon'
1063  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1065  if not self._options.beamspot:
1066  self._options.beamspot=VtxSmearedDefaultKey
1067 
1068  # if its MC then change the raw2digi
1069  if self._options.isMC==True:
1070  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1071  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1072  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1073  self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1074  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1075  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1076  self.NANODefaultSeq='nanoSequenceMC'
1077  else:
1078  self._options.beamspot = None
1079 
1080  #patch for gen, due to backward incompatibility
1081  if 'reGEN' in self.stepMap:
1082  self.GENDefaultSeq='fixGenInfo'
1083 
1084  if self._options.scenario=='cosmics':
1085  self._options.pileup='Cosmics'
1086  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1087  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1088  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1089  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1090  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1091  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1092  if self._options.isMC==True:
1093  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1094  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1095  self.RECODefaultSeq='reconstructionCosmics'
1096  self.DQMDefaultSeq='DQMOfflineCosmics'
1097 
1098  if self._options.scenario=='HeavyIons':
1099  if not self._options.beamspot:
1100  self._options.beamspot=VtxSmearedHIDefaultKey
1101  self.HLTDefaultSeq = 'HIon'
1102  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1103  self.VALIDATIONDefaultSeq=''
1104  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1105  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1106  self.RECODefaultSeq='reconstruction'
1107  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1108  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1109  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1110  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1111  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1112  if self._options.isMC==True:
1113  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1114 
1115 
1118  self.USERDefaultSeq='user'
1119  self.USERDefaultCFF=None
1121  # the magnetic field
1122  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1123  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1124 
1125  # the geometry
1126  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1128  simGeometry=''
1129  if self._options.fast:
1130  if 'start' in self._options.conditions.lower():
1131  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1132  else:
1133  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1134  else:
1135  def inGeometryKeys(opt):
1136  from Configuration.StandardSequences.GeometryConf import GeometryConf
1137  if opt in GeometryConf:
1138  return GeometryConf[opt]
1139  else:
1140  return opt
1141 
1142  geoms=self._options.geometry.split(',')
1143  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1144  if len(geoms)==2:
1145  #may specify the reco geometry
1146  if '/' in geoms[1] or '_cff' in geoms[1]:
1147  self.GeometryCFF=geoms[1]
1148  else:
1149  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1150 
1151  if (geoms[0].startswith('DB:')):
1152  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1153  self.geometryDBLabel=geoms[0][3:]
1154  print("with DB:")
1155  else:
1156  if '/' in geoms[0] or '_cff' in geoms[0]:
1157  self.SimGeometryCFF=geoms[0]
1158  else:
1159  simGeometry=geoms[0]
1160  if self._options.gflash==True:
1161  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1162  else:
1163  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1164 
1165  # synchronize the geometry configuration and the FullSimulation sequence to be used
1166  if simGeometry not in defaultOptions.geometryExtendedOptions:
1167  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1168 
1169  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1170  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1171  self._options.beamspot='NoSmear'
1172 
1173  # fastsim requires some changes to the default cff files and sequences
1174  if self._options.fast:
1175  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1176  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1177  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1178  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1179  self.NANODefaultSeq = 'nanoSequenceFS'
1180  self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1181 
1182  # Mixing
1183  if self._options.pileup=='default':
1184  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1185  self._options.pileup=MixingDefaultKey
1186 
1187 
1188  #not driven by a default cff anymore
1189  if self._options.isData:
1190  self._options.pileup=None
1191 
1192 
1195  # for alca, skims, etc
def addExtraStream(self, name, stream, workflow='full'):
    """Attach an extra output stream (alca, skims, ...) to the process.

    name: module name given to the PoolOutputModule on the process.
    stream: a cms.FilteredStream-like object (selectEvents, paths, content,
            name, dataTier are read here).
    workflow: 'full' (schedule paths and add the output), 'producers'
              (schedule paths only) or 'output' (add the output only, with
              SelectEvents pointing at a previous process).
    Returns the configured output module.
    """
    # define output module and go from there
    output = cms.OutputModule("PoolOutputModule")
    if stream.selectEvents.parameters_().__len__()!=0:
        output.SelectEvents = stream.selectEvents
    else:
        # no explicit selection: select on the stream's own path(s)
        output.SelectEvents = cms.untracked.PSet()
        output.SelectEvents.SelectEvents=cms.vstring()
        if isinstance(stream.paths,tuple):
            for path in stream.paths:
                output.SelectEvents.SelectEvents.append(path.label())
        else:
            output.SelectEvents.SelectEvents.append(stream.paths.label())



    if isinstance(stream.content,str):
        # content given by name: copy the parameters of the named PSet
        evtPset=getattr(self.process,stream.content)
        for p in evtPset.parameters_():
            setattr(output,p,getattr(evtPset,p))
        if not self._options.inlineEventContent:
            # dump a reference to the event content instead of inlining it
            def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                return label
            output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
    else:
        output.outputCommands = stream.content


    output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

    output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                         filterName = cms.untracked.string(stream.name))

    if self._options.filtername:
        output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

    #add an automatic flushing to limit memory consumption
    output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

    # BUGFIX: was `workflow in ("producers,full")`, i.e. a substring test on
    # the single string "producers,full"; use real tuple membership.
    if workflow in ("producers", "full"):
        if isinstance(stream.paths,tuple):
            for path in stream.paths:
                self.schedule.append(path)
        else:
            self.schedule.append(stream.paths)


    # in case of relvals we don't want to have additional outputs
    if (not self._options.relval) and workflow in ("full","output"):
        self.additionalOutputs[name] = output
        setattr(self.process,name,output)

    if workflow == 'output':
        # adjust the select events to the proper trigger results from previous process
        filterList = output.SelectEvents.SelectEvents
        # (loop variable renamed: `filter` shadowed the builtin)
        for i, trigger in enumerate(filterList):
            filterList[i] = trigger+":"+self._options.triggerResultsProcess

    return output
1255 
1256  #----------------------------------------------------------------------------
1257  # here the methods to create the steps. Of course we are doing magic here ;)
1258  # prepare_STEPNAME modifies self.process and what else's needed.
1259  #----------------------------------------------------------------------------
1260 
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ=''):
    """Resolve a step specification and load the corresponding cff.

    stepSpec forms:
      'seq'            -> sequence 'seq' from defaultCFF
      'dir/sub/cff'    -> defaultSEQ (or the spec itself if no default) from that cff
      'cff.seqA+seqB'  -> the given sequence(s) from the given cff
    Returns (loadAndRemember result, sequence, cff).
    Raises Exception for any other form.
    """
    _dotsplit = stepSpec.split('.')
    if ( len(_dotsplit)==1 ):
        if '/' in _dotsplit[0]:
            # a cff path without a sequence: fall back to the default sequence
            _sequence = defaultSEQ if defaultSEQ else stepSpec
            _cff = _dotsplit[0]
        else:
            _sequence = stepSpec
            _cff = defaultCFF
    elif ( len(_dotsplit)==2 ):
        _cff,_sequence = _dotsplit
    else:
        print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
        print(stepSpec,"not recognized")
        # BUGFIX: a bare `raise` with no active exception raised an unrelated
        # RuntimeError; raise an informative Exception instead.
        raise Exception("invalid step specification: "+stepSpec)
    l=self.loadAndRemember(_cff)
    return l,_sequence,_cff
1278 
def scheduleSequence(self,seq,prefix,what='Path'):
    """Turn a sequence specification into scheduled cms Path/EndPath objects.

    '*'-separated parts are merged into a single path named `prefix`
    (cms.Task members are associated rather than added); '+'-separated
    parts each get their own path named `prefix`+index; a plain name gets
    one path named exactly `prefix`. Every created path is appended to
    self.schedule.
    """
    if '*' in seq:
        # a single path carrying all '*'-separated sequences
        parts = seq.split('*')
        setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, parts[0])))
        combined = getattr(self.process, prefix)
        for part in parts[1:]:
            member = getattr(self.process, part)
            if isinstance(member, cms.Task):
                combined.associate(member)
            else:
                combined += member
        self.schedule.append(getattr(self.process, prefix))
        return
    if '+' not in seq:
        # one sequence, one path named exactly `prefix`
        if self.nextScheduleIsConditional:
            self.conditionalPaths.append(prefix)
        setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, seq)))
        self.schedule.append(getattr(self.process, prefix))
    else:
        # one path per '+'-separated sequence, suffixed with its index
        for index, part in enumerate(seq.split('+')):
            pathName = prefix + '%d' % (index)
            setattr(self.process, pathName, getattr(cms, what)(getattr(self.process, part)))
            self.schedule.append(getattr(self.process, pathName))
    return
1307 
def scheduleSequenceAtEnd(self,seq,prefix):
    """Schedule `seq` as an EndPath named `prefix` (see scheduleSequence)."""
    self.scheduleSequence(seq, prefix, what='EndPath')
    return
1311 
def prepare_ALCAPRODUCER(self, stepSpec = None):
    """Shortcut for prepare_ALCA restricted to the 'producers' workflow."""
    self.prepare_ALCA(stepSpec, workflow="producers")
1314 
def prepare_ALCAOUTPUT(self, stepSpec = None):
    """Shortcut for prepare_ALCA restricted to the 'output' workflow."""
    self.prepare_ALCA(stepSpec, workflow="output")
1317 
def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
    """ Enrich the process with alca streams

    stepSpec: '+'-separated list of ALCARECO stream names (supports @X
    aliases expanded through Configuration.AlCa.autoAlca), or None for the
    default. workflow is forwarded to addExtraStream ('full', 'producers'
    or 'output'). Raises if a name is too long for DBS or unknown.
    """
    alcaConfig,sequence,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ALCADefaultCFF)

    MAXLEN=31 #the alca producer name should be shorter than 31 chars as per https://cms-talk.web.cern.ch/t/alcaprompt-datasets-not-loaded-in-dbs/11146/2
    # decide which ALCA paths to use
    alcaList = sequence.split("+")
    for alca in alcaList:
        if (len(alca)>MAXLEN):
            raise Exception("The following alca "+str(alca)+" name (with length "+str(len(alca))+" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+str(MAXLEN)+")!")

    maxLevel=0  # NOTE(review): appears unused within this method
    from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
    # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
    self.expandMapping(alcaList,autoAlca)
    self.AlCaPaths=[]
    # scan every object defined in the ALCA cff and keep the requested streams
    for name in alcaConfig.__dict__:
        alcastream = getattr(alcaConfig,name)
        shortName = name.replace('ALCARECOStream','')
        if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
            if shortName in AlCaNoConcurrentLumis:
                # some AlCa sequences cannot run with concurrent lumi blocks
                print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                self._options.nConcurrentLumis = 1
                self._options.nConcurrentIOVs = 1
            output = self.addExtraStream(name,alcastream, workflow = workflow)
            self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
            self.AlCaPaths.append(shortName)
            if 'DQM' in alcaList:
                if not self._options.inlineEventContent and hasattr(self.process,name):
                    self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                else:
                    output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

            #rename the HLT process name in the alca modules
            if self._options.hltProcess or 'HLT' in self.stepMap:
                if isinstance(alcastream.paths,tuple):
                    for path in alcastream.paths:
                        self.renameHLTprocessInSequence(path.label())
                else:
                    self.renameHLTprocessInSequence(alcastream.paths.label())

            # remove every occurrence of this stream from the request list
            for i in range(alcaList.count(shortName)):
                alcaList.remove(shortName)

        # DQM needs a special handling
        elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
            path = getattr(alcaConfig,name)
            self.schedule.append(path)
            alcaList.remove('DQM')

        if isinstance(alcastream,cms.Path):
            #black list the alca path so that they do not appear in the cfg
            self.blacklist_paths.append(alcastream)


    # anything left in alcaList matched no known stream: report and fail
    if len(alcaList) != 0:
        available=[]
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            if isinstance(alcastream,cms.FilteredStream):
                available.append(name.replace('ALCARECOStream',''))
        print("The following alcas could not be found "+str(alcaList))
        print("available ",available)
        #print "verify your configuration, ignoring for now"
        raise Exception("The following alcas could not be found "+str(alcaList))
1383 
def prepare_LHE(self, stepSpec = None):
    """Load the LHE fragment named by --evt_type, set its event count, and
    schedule it on a dedicated (output-excluded) path.

    stepSpec is the name of the LHE producer module defined by the fragment.
    """
    # translate the event type into an importable module path
    loadFragment = self._options.evt_type.replace('.py','').replace('.','_').replace('python/','').replace('/','.')
    print("Loading lhe fragment from",loadFragment)
    __import__(loadFragment)
    self.process.load(loadFragment)

    # make sure the producer is inlined in the dumped configuration
    self._options.inlineObjects += ',' + stepSpec

    getattr(self.process, stepSpec).nEvents = self._options.number

    # schedule it, keeping the path out of the regular output selection
    self.process.lhe_step = cms.Path(getattr(self.process, stepSpec))
    self.excludedPaths.append("lhe_step")
    self.schedule.append(self.process.lhe_step)
1400 
def prepare_GEN(self, stepSpec = None):
    """ load the fragment of generator configuration

    Imports the generator fragment named by --evt_type (unless only input
    files are given), exposes its modules to the dumped configuration,
    loads vertex smearing and the generation sequence, and schedules
    generation_step (plus the gen-filter summary EndPath unless re-running
    GEN or using the 'pgen_smear' sequence).
    """
    loadFailure=False
    #remove trailing .py
    #support old style .cfi by changing into something.cfi into something_cfi
    #remove python/ from the name
    loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
    #standard location of fragments
    if not '/' in loadFragment:
        loadFragment='Configuration.Generator.'+loadFragment
    else:
        loadFragment=loadFragment.replace('/','.')
    try:
        print("Loading generator fragment from",loadFragment)
        __import__(loadFragment)
    except:
        # NOTE(review): bare except — any import-time failure (including
        # typos inside the fragment) lands here and is tolerated when input
        # files were provided instead.
        loadFailure=True
        #if self.process.source and self.process.source.type_()=='EmptySource':
        if not (self._options.filein or self._options.dasquery):
            raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

    if not loadFailure:
        from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators

        generatorModule=sys.modules[loadFragment]
        genModules=generatorModule.__dict__
        #remove lhe producer module since this should have been
        #imported instead in the LHE step
        if self.LHEDefaultSeq in genModules:
            del genModules[self.LHEDefaultSeq]

        if self._options.hideGen:
            self.loadAndRemember(loadFragment)
        else:
            self.process.load(loadFragment)
            # expose the objects from that fragment to the configuration
            import FWCore.ParameterSet.Modules as cmstypes
            for name in genModules:
                theObject = getattr(generatorModule,name)
                if isinstance(theObject, cmstypes._Module):
                    self._options.inlineObjects=name+','+self._options.inlineObjects
                    if theObject.type_() in noConcurrentLumiGenerators:
                        # some generator types cannot run with concurrent lumi blocks
                        print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
                        self._options.nConcurrentLumis = 1
                        self._options.nConcurrentIOVs = 1
                elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                    self._options.inlineObjects+=','+name

        # remember the production filter sequence for later path injection
        if stepSpec == self.GENDefaultSeq or stepSpec == 'pgen_genonly':
            if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                self.productionFilterSequence = 'ProductionFilterSequence'
            elif 'generator' in genModules:
                self.productionFilterSequence = 'generator'

    """ Enrich the schedule with the rest of the generation step """
    _,_genSeqName,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.GENDefaultCFF)

    if True:
        try:
            # load the vertex smearing configuration for the chosen beamspot
            from Configuration.StandardSequences.VtxSmeared import VtxSmeared
            cffToBeLoaded=VtxSmeared[self._options.beamspot]
            self.loadAndRemember(cffToBeLoaded)
        except ImportError:
            raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

        if self._options.scenario == 'HeavyIons':
            if self._options.pileup=='HiMixGEN':
                self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
            elif self._options.pileup=='HiMixEmbGEN':
                self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
            else:
                self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

    self.process.generation_step = cms.Path( getattr(self.process,_genSeqName) )
    self.schedule.append(self.process.generation_step)

    #register to the genstepfilter the name of the path (static right now, but might evolve)
    self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

    if 'reGEN' in self.stepMap or stepSpec == 'pgen_smear':
        #stop here
        return

    """ Enrich the schedule with the summary of the filter step """
    #the gen filter in the endpath
    self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
    self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
    return
1489 
    def prepare_SIM(self, stepSpec = None):
        """ Enrich the schedule with the simulation step"""
        # load the default (or user-specified) simulation cff and get the sequence name
        _,_simSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SIMDefaultCFF)
        if not self._options.fast:
            # full simulation: optionally switch to GFlash parametrised showers
            if self._options.gflash==True:
                self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")

            # a 0T run needs the magnetic field switched off in Geant4
            if self._options.magField=='0T':
                self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
        else:
            # FastSim: the 0T case is handled on the fastSimProducer instead
            if self._options.magField=='0T':
                self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")

        self.scheduleSequence(_simSeq,'simulation_step')
        return
1505 
    def prepare_DIGI(self, stepSpec = None):
        """ Enrich the schedule with the digitisation step"""
        _,_digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGIDefaultCFF)

        if self._options.gflash==True:
            self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")

        # validation-style digitisation runs the full set of digitizers
        if _digiSeq == 'pdigi_valid' or _digiSeq == 'pdigi_hi':
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")

        # when re-digitising an existing input file (i.e. not generating in this job
        # and not reading LHE), the GEN products have to be kept from the input file
        if _digiSeq != 'pdigi_nogen' and _digiSeq != 'pdigi_valid_nogen' and _digiSeq != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
            if self._options.inputEventContent=='':
                self._options.inputEventContent='REGEN'
            else:
                self._options.inputEventContent=self._options.inputEventContent+',REGEN'


        self.scheduleSequence(_digiSeq,'digitisation_step')
        return
1525 
1526  def prepare_CFWRITER(self, stepSpec = None):
1527  """ Enrich the schedule with the crossing frame writer step"""
1529  self.scheduleSequence('pcfw','cfwriter_step')
1530  return
1531 
1532  def prepare_DATAMIX(self, stepSpec = None):
1533  """ Enrich the schedule with the digitisation step"""
1535  self.scheduleSequence('pdatamix','datamixing_step')
1536 
1537  if self._options.pileup_input:
1538  theFiles=''
1539  if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
1540  theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1541  elif self._options.pileup_input.startswith("filelist:"):
1542  theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
1543  else:
1544  theFiles=self._options.pileup_input.split(',')
1545  #print theFiles
1546  self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1547 
1548  return
1549 
1550  def prepare_DIGI2RAW(self, stepSpec = None):
1551  _,_digi2rawSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DIGI2RAWDefaultCFF)
1552  self.scheduleSequence(_digi2rawSeq,'digi2raw_step')
1553  return
1554 
1555  def prepare_REPACK(self, stepSpec = None):
1556  _,_repackSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.REPACKDefaultCFF)
1557  self.scheduleSequence(_repackSeq,'digi2repack_step')
1558  return
1559 
1560  def loadPhase2GTMenu(self, menuFile: str):
1561  import importlib
1562  menuPath = f'L1Trigger.Configuration.Phase2GTMenus.{menuFile}'
1563  menuModule = importlib.import_module(menuPath)
1564 
1565  theMenu = menuModule.menu
1566  triggerPaths = [] #we get a list of paths in each of these files to schedule
1567 
1568  for triggerPathFile in theMenu:
1569  self.loadAndRemember(triggerPathFile) #this load and remember will set the algo variable of the algoblock later
1570 
1571  triggerPathModule = importlib.import_module(triggerPathFile)
1572  for objName in dir(triggerPathModule):
1573  obj = getattr(triggerPathModule, objName)
1574  objType = type(obj)
1575  if objType == cms.Path:
1576  triggerPaths.append(objName)
1577 
1578  triggerScheduleList = [getattr(self.process, name) for name in triggerPaths] #get the actual paths to put in the schedule
1579  self.schedule.extend(triggerScheduleList) #put them in the schedule for later
1580 
1581  # create the L1 GT step
1582  # We abuse the stepSpec a bit as a way to specify a menu
1583  def prepare_L1P2GT(self, stepSpec=None):
1584  """ Run the GT emulation sequence on top of the L1 emulation step """
1586  self.scheduleSequence('l1tGTProducerSequence', 'Phase2L1GTProducer')
1587  self.scheduleSequence('l1tGTAlgoBlockProducerSequence', 'Phase2L1GTAlgoBlockProducer')
1588  if stepSpec == None:
1589  defaultMenuFile = "prototype_2023_v1_0_0"
1590  self.loadPhase2GTMenu(menuFile = defaultMenuFile)
1591  else:
1592  self.loadPhase2GTMenu(menuFile = stepSpec)
1593 
1594  def prepare_L1(self, stepSpec = None):
1595  """ Enrich the schedule with the L1 simulation step"""
1596  assert(stepSpec == None)
1597  self.loadAndRemember(self.L1EMDefaultCFF)
1598  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1599  return
1600 
1601  def prepare_L1REPACK(self, stepSpec = None):
1602  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1603  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1604  if stepSpec in supported:
1605  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1606  if self._options.scenario == 'HeavyIons':
1607  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1608  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1609  else:
1610  print("L1REPACK with '",stepSpec,"' is not supported! Supported choices are: ",supported)
1611  raise Exception('unsupported feature')
1612 
1613  def prepare_HLT(self, stepSpec = None):
1614  """ Enrich the schedule with the HLT simulation step"""
1615  if not stepSpec:
1616  print("no specification of the hlt menu has been given, should never happen")
1617  raise Exception('no HLT specifications provided')
1618 
1619  if '@' in stepSpec:
1620  # case where HLT:@something was provided
1621  from Configuration.HLT.autoHLT import autoHLT
1622  key = stepSpec[1:]
1623  if key in autoHLT:
1624  stepSpec = autoHLT[key]
1625  else:
1626  raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)
1627 
1628  if ',' in stepSpec:
1629  #case where HLT:something:something was provided
1630  self.executeAndRemember('import HLTrigger.Configuration.Utilities')
1631  optionsForHLT = {}
1632  if self._options.scenario == 'HeavyIons':
1633  optionsForHLT['type'] = 'HIon'
1634  else:
1635  optionsForHLT['type'] = 'GRun'
1636  optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
1637  if stepSpec == 'run,fromSource':
1638  if hasattr(self.process.source,'firstRun'):
1639  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1640  elif hasattr(self.process.source,'setRunNumber'):
1641  self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1642  else:
1643  raise Exception(f'Cannot replace menu to load {stepSpec}')
1644  else:
1645  self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(',',':'),optionsForHLTConfig))
1646  else:
1647  self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % stepSpec)
1648 
1649  if self._options.isMC:
1650  self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1651 
1652  if self._options.name != 'HLT':
1653  self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
1654  self.additionalCommands.append('process = ProcessName(process)')
1655  self.additionalCommands.append('')
1656  from HLTrigger.Configuration.CustomConfigs import ProcessName
1657  self.process = ProcessName(self.process)
1658 
1659  if self.process.schedule == None:
1660  raise Exception('the HLT step did not attach a valid schedule to the process')
1661 
1662  self.scheduleIndexOfFirstHLTPath = len(self.schedule)
1663  [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]
1664 
1665  # this is a fake, to be removed with fastim migration and HLT menu dump
1666  if self._options.fast:
1667  if not hasattr(self.process,'HLTEndSequence'):
1668  self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1669 
1670 
1671  def prepare_RAW2RECO(self, stepSpec = None):
1672  if ','in stepSpec:
1673  seqReco,seqDigi=stepSpec.spli(',')
1674  else:
1675  print(f"RAW2RECO requires two specifications {stepSpec} insufficient")
1676 
1677  self.prepare_RAW2DIGI(seqDigi)
1678  self.prepare_RECO(seqReco)
1679  return
1680 
1681  def prepare_RAW2DIGI(self, stepSpec = "RawToDigi"):
1682  _,_raw2digiSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RAW2DIGIDefaultCFF)
1683  self.scheduleSequence(_raw2digiSeq,'raw2digi_step')
1684  return
1685 
1686  def prepare_PATFILTER(self, stepSpec = None):
1687  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1688  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1689  for filt in allMetFilterPaths:
1690  self.schedule.append(getattr(self.process,'Flag_'+filt))
1691 
    def prepare_L1HwVal(self, stepSpec = 'L1HwVal'):
        ''' Enrich the schedule with L1 HW validation '''
        # the cff is still loaded for backward compatibility, but nothing is scheduled
        self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1HwValDefaultCFF)
        print('\n\n\n DEPRECATED this has no action \n\n\n')
        return
1697 
1698  def prepare_L1Reco(self, stepSpec = "L1Reco"):
1699  ''' Enrich the schedule with L1 reconstruction '''
1700  _,_l1recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1RecoDefaultCFF)
1701  self.scheduleSequence(_l1recoSeq,'L1Reco_step')
1702  return
1703 
1704  def prepare_L1TrackTrigger(self, stepSpec = "L1TrackTrigger"):
1705  ''' Enrich the schedule with L1 reconstruction '''
1706  _,_l1tracktriggerSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.L1TrackTriggerDefaultCFF)
1707  self.scheduleSequence(_l1tracktriggerSeq,'L1TrackTrigger_step')
1708  return
1709 
1710  def prepare_FILTER(self, stepSpec = None):
1711  ''' Enrich the schedule with a user defined filter sequence '''
1712 
1713  filterConfig,filterSeq = stepSpec.split('.')
1714  filterConfig=self.load(filterConfig)
1715 
1716  class PrintAllModules(object):
1717  def __init__(self):
1718  self.inliner=''
1719  pass
1720  def enter(self,visitee):
1721  try:
1722  label=visitee.label()
1723 
1724  self.inliner=label+','+self.inliner
1725  except:
1726  pass
1727  def leave(self,v): pass
1728 
1729  expander=PrintAllModules()
1730  getattr(self.process,filterSeq).visit( expander )
1731  self._options.inlineObjects+=','+expander.inliner
1732  self._options.inlineObjects+=','+filterSeq
1733 
1734 
1735  self.scheduleSequence(filterSeq,'filtering_step')
1736  self.nextScheduleIsConditional=True
1737 
1738  self.productionFilterSequence = filterSeq
1739 
1740  return
1741 
1742  def prepare_RECO(self, stepSpec = "reconstruction"):
1743  ''' Enrich the schedule with reconstruction '''
1744  _,_recoSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECODefaultCFF)
1745  self.scheduleSequence(_recoSeq,'reconstruction_step')
1746  return
1747 
1748  def prepare_RECOSIM(self, stepSpec = "recosim"):
1749  ''' Enrich the schedule with reconstruction '''
1750  _,_recosimSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.RECOSIMDefaultCFF)
1751  self.scheduleSequence(_recosimSeq,'recosim_step')
1752  return
1753 
    def prepare_RECOBEFMIX(self, stepSpec = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
            sys.exit()
        # NOTE(review): stepSpec is ignored here - the default sequence is always
        # used; confirm whether a user-specified sequence should be honoured
        _,_recobefmixSeq,_ = self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
        self.scheduleSequence(_recobefmixSeq,'reconstruction_befmix_step')
        return
1762 
1763  def prepare_PAT(self, stepSpec = "miniAOD"):
1764  ''' Enrich the schedule with PAT '''
1765  self.prepare_PATFILTER(self)
1766  self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATDefaultCFF)
1767  self.labelsToAssociate.append('patTask')
1768  if self._options.isData:
1769  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1770  else:
1771  if self._options.fast:
1772  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1773  else:
1774  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1775 
1776  if self._options.hltProcess:
1777  if len(self._options.customise_commands) > 1:
1778  self._options.customise_commands = self._options.customise_commands + " \n"
1779  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1780  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1781  self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1782 
1783 # self.renameHLTprocessInSequence(sequence)
1784 
1785  return
1786 
    def prepare_PATGEN(self, stepSpec = "miniGEN"):
        ''' Enrich the schedule with PATGEN '''
        # the PATGEN task runs unscheduled; only the task label is associated
        self.loadDefaultOrSpecifiedCFF(stepSpec,self.PATGENDefaultCFF) #this is unscheduled
        self.labelsToAssociate.append('patGENTask')
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")
        return
1794 
1795  def prepare_NANO(self, stepSpec = '' ):
1796  print(f"in prepare_nano {stepSpec}")
1797  ''' Enrich the schedule with NANO '''
1798  if not '@' in stepSpec:
1799  _,_nanoSeq,_nanoCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANODefaultCFF,self.NANODefaultSeq)
1800  else:
1801  _nanoSeq = stepSpec
1802  _nanoCff = self.NANODefaultCFF
1803 
1804  print(_nanoSeq)
1805  # create full specified sequence using autoNANO
1806  from PhysicsTools.NanoAOD.autoNANO import autoNANO, expandNanoMapping
1807  # if not a autoNANO mapping, load an empty customization, which later will be converted into the default.
1808  _nanoCustoms = _nanoSeq.split('+') if '@' in stepSpec else ['']
1809  _nanoSeq = _nanoSeq.split('+')
1810  expandNanoMapping(_nanoSeq, autoNANO, 'sequence')
1811  expandNanoMapping(_nanoCustoms, autoNANO, 'customize')
1812  # make sure there are no duplicates while preserving the ordering
1813  _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1814  _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1815  # replace empty sequence with default
1816  _nanoSeq = [seq if seq!='' else f"{self.NANODefaultCFF}.{self.NANODefaultSeq}" for seq in _nanoSeq]
1817  _nanoCustoms = [cust if cust!='' else self.NANODefaultCustom for cust in _nanoCustoms]
1818  # build and inject the sequence
1819  if len(_nanoSeq) < 1 and '@' in stepSpec:
1820  raise Exception(f'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mapping')
1821  _seqToSchedule = []
1822  for _subSeq in _nanoSeq:
1823  if '.' in _subSeq:
1824  _cff,_seq = _subSeq.split('.')
1825  print("NANO: scheduling:",_seq,"from",_cff)
1826  self.loadAndRemember(_cff)
1827  _seqToSchedule.append(_seq)
1828  elif '/' in _subSeq:
1829  self.loadAndRemember(_subSeq)
1830  _seqToSchedule.append(self.NANODefaultSeq)
1831  else:
1832  print("NANO: scheduling:",_subSeq)
1833  _seqToSchedule.append(_subSeq)
1834  self.scheduleSequence('+'.join(_seqToSchedule), 'nanoAOD_step')
1835 
1836  # add the customisations
1837  for custom in _nanoCustoms:
1838  custom_path = custom if '.' in custom else '.'.join([_nanoCff,custom])
1839  # customization order can be important for NANO, here later specified customise take precedence
1840  self._options.customisation_file.append(custom_path)
1841  if self._options.hltProcess:
1842  if len(self._options.customise_commands) > 1:
1843  self._options.customise_commands = self._options.customise_commands + " \n"
1844  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1845 
1846  def prepare_NANOGEN(self, stepSpec = "nanoAOD"):
1847  ''' Enrich the schedule with NANOGEN '''
1848  # TODO: Need to modify this based on the input file type
1849  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1850  _,_nanogenSeq,_nanogenCff = self.loadDefaultOrSpecifiedCFF(stepSpec,self.NANOGENDefaultCFF)
1851  self.scheduleSequence(_nanogenSeq,'nanoAOD_step')
1852  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1853  if self._options.runUnscheduled:
1854  self._options.customisation_file_unsch.insert(0, '.'.join([_nanogenCff, custom]))
1855  else:
1856  self._options.customisation_file.insert(0, '.'.join([_nanogenCff, custom]))
1857 
    def prepare_SKIM(self, stepSpec = "all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.SKIMDefaultCFF)

        stdHLTProcName = 'HLT'
        newHLTProcName = self._options.hltProcess
        # rename the HLT process in skim sequences when HLT is re-run under a new name
        customiseForReHLT = (newHLTProcName or (stdHLTProcName in self.stepMap)) and (newHLTProcName != stdHLTProcName)
        if customiseForReHLT:
            print("replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))

        # expand '@'-style aliases via the autoSkim mapping
        from Configuration.Skimming.autoSkim import autoSkim
        skimlist = sequence.split('+')
        self.expandMapping(skimlist,autoSkim)

        #print("dictionary for skims:", skimConfig.__dict__)
        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig, skim)

            # blacklist AlCa paths so that they do not appear in the cfg
            if isinstance(skimstream, cms.Path):
                self.blacklist_paths.append(skimstream)
            # if enabled, apply "hltProcess" renaming to Sequences
            elif isinstance(skimstream, cms.Sequence):
                if customiseForReHLT:
                    self.renameHLTprocessInSequence(skim, proc = newHLTProcName, HLTprocess = stdHLTProcName, verbosityLevel = 0)

            # only FilteredStream objects define actual skims
            if not isinstance(skimstream, cms.FilteredStream):
                continue

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                #add a DQM eventcontent for this guy
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                    )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                # drop every occurrence of this skim from the requested list
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        # anything left over did not match a known skim: treat it as a typo
        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1911 
1912 
1913  def prepare_USER(self, stepSpec = None):
1914  ''' Enrich the schedule with a user defined sequence '''
1915  _,_userSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.USERDefaultCFF)
1916  self.scheduleSequence(_userSeq,'user_step')
1917  return
1918 
1919  def prepare_POSTRECO(self, stepSpec = None):
1920  """ Enrich the schedule with the postreco step """
1922  self.scheduleSequence('postreco_generator','postreco_step')
1923  return
1924 
1925 
1926  def prepare_VALIDATION(self, stepSpec = 'validation'):
1927  print(f"{stepSpec} in preparing validation")
1928  _,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.VALIDATIONDefaultCFF)
1929  from Validation.Configuration.autoValidation import autoValidation
1930  #in case VALIDATION:something:somethingelse -> something,somethingelse
1931  if sequence.find(',')!=-1:
1932  prevalSeqName=sequence.split(',')[0].split('+')
1933  valSeqName=sequence.split(',')[1].split('+')
1934  self.expandMapping(prevalSeqName,autoValidation,index=0)
1935  self.expandMapping(valSeqName,autoValidation,index=1)
1936  else:
1937  if '@' in sequence:
1938  prevalSeqName=sequence.split('+')
1939  valSeqName=sequence.split('+')
1940  self.expandMapping(prevalSeqName,autoValidation,index=0)
1941  self.expandMapping(valSeqName,autoValidation,index=1)
1942  else:
1943  postfix=''
1944  if sequence:
1945  postfix='_'+sequence
1946  prevalSeqName=['prevalidation'+postfix]
1947  valSeqName=['validation'+postfix]
1948  if not hasattr(self.process,valSeqName[0]):
1949  prevalSeqName=['']
1950  valSeqName=[sequence]
1951 
1952  def NFI(index):
1953 
1954  if index==0:
1955  return ''
1956  else:
1957  return '%s'%index
1958 
1959 
1960  #rename the HLT process in validation steps
1961  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1962  for s in valSeqName+prevalSeqName:
1963  if s:
1965  for (i,s) in enumerate(prevalSeqName):
1966  if s:
1967  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1968  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1969 
1970  for (i,s) in enumerate(valSeqName):
1971  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1972  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1973 
1974  #needed in case the miniAODValidation sequence is run starting from AODSIM
1975  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1976  return
1977 
1978  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1979  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1980  self._options.restoreRNDSeeds=True
1981 
1982  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1983  self.executeAndRemember("process.mix.playback = True")
1984  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1985  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1986  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1987 
1988  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1989  #will get in the schedule, smoothly
1990  for (i,s) in enumerate(valSeqName):
1991  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1992 
1993  return
1994 
1995 
1997  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1998  It will climb down within PSets, VPSets and VInputTags to find its target"""
    def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
        # replacement string for every matched process name
        self._paramReplace = paramReplace
        # process name to search for
        self._paramSearch = paramSearch
        # when True, every replacement is printed as it happens
        self._verbose = verbose
        # parameter names that must never be touched
        self._whitelist = whitelist
    def doIt(self, pset, base):
        # recursively walk a parameterizable object and replace every occurrence of
        # the searched process name in strings, vstrings and (V)InputTags;
        # 'base' is only used to build a readable path for verbose printouts
        if isinstance(pset, cms._Parameterizable):
            for name in pset.parameters_().keys():
                # skip whitelisted parameters
                if name in self._whitelist:
                    continue
                # if I use pset.parameters_().items() I get copies of the parameter values
                # so I can't modify the nested pset
                value = getattr(pset, name)
                valueType = type(value)
                if valueType in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
                    # nested parameter set: recurse into it
                    self.doIt(value,base+"."+name)
                elif valueType in [cms.VPSet, cms.untracked.VPSet]:
                    for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                elif valueType in [cms.string, cms.untracked.string]:
                    if value.value() == self._paramSearch:
                        if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
                        setattr(pset, name,self._paramReplace)
                elif valueType in [cms.VInputTag, cms.untracked.VInputTag]:
                    for (i,n) in enumerate(value):
                        if not isinstance(n, cms.InputTag):
                            n=cms.InputTag(n)
                        if n.processName == self._paramSearch:
                            # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                            if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
                            setattr(n,"processName",self._paramReplace)
                            value[i]=n
                elif valueType in [cms.vstring, cms.untracked.vstring]:
                    for (i,n) in enumerate(value):
                        if n==self._paramSearch:
                            getattr(pset,name)[i]=self._paramReplace
                elif valueType in [cms.InputTag, cms.untracked.InputTag]:
                    if value.processName == self._paramSearch:
                        if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
                        setattr(getattr(pset, name),"processName",self._paramReplace)
2040 
2041  def enter(self,visitee):
2042  label = ''
2043  try:
2044  label = visitee.label()
2045  except AttributeError:
2046  label = '<Module not in a Process>'
2047  except:
2048  label = 'other execption'
2049  self.doIt(visitee, label)
2050 
2051  def leave(self,visitee):
2052  pass
2053 
    #visit a sequence to replace all input tags
    def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
        # replace InputTag oldT by newT everywhere in the named sequence, both on
        # the live process and in the commands replayed by the dumped configuration
        print("Replacing all InputTag %s => %s"%(oldT,newT))
        from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
        massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
        loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
        if not loadMe in self.additionalCommands:
            self.additionalCommands.append(loadMe)
        self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2063 
2064  #change the process name used to address HLT results in any sequence
2065  def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1):
2066  if proc == None:
2067  proc = self._options.hltProcess if self._options.hltProcess else self.process.name_()
2068  if proc == HLTprocess:
2069  return
2070  # look up all module in sequence
2071  if verbosityLevel > 0:
2072  print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2073  verboseVisit = (verbosityLevel > 1)
2074  getattr(self.process,sequence).visit(
2075  ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",), verbose = verboseVisit))
2076  if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
2077  self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
2079  'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))'
2080  % (sequence, HLTprocess, proc, verboseVisit))
2081 
2082  def expandMapping(self,seqList,mapping,index=None):
2083  maxLevel=30
2084  level=0
2085  while '@' in repr(seqList) and level<maxLevel:
2086  level+=1
2087  for specifiedCommand in seqList:
2088  if specifiedCommand.startswith('@'):
2089  location=specifiedCommand[1:]
2090  if not location in mapping:
2091  raise Exception("Impossible to map "+location+" from "+repr(mapping))
2092  mappedTo=mapping[location]
2093  if index!=None:
2094  mappedTo=mappedTo[index]
2095  seqList.remove(specifiedCommand)
2096  seqList.extend(mappedTo.split('+'))
2097  break;
2098  if level==maxLevel:
2099  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
2100 
    def prepare_DQM(self, stepSpec = 'DQMOffline'):
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        _,_dqmSeq,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.DQMOFFLINEDefaultCFF)
        # index 0 of each autoDQM entry is the main sequence, index 1 the post-PAT one
        sequenceList=_dqmSeq.split('+')
        postSequenceList=_dqmSeq.split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(OrderedSet(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,_sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(_sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,_sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,_sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, _sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2143 
2144  def prepare_HARVESTING(self, stepSpec = None):
2145  """ Enrich the process with harvesting step """
2146  self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'
2148 
2149  harvestingConfig,sequence,_ = self.loadDefaultOrSpecifiedCFF(stepSpec,self.HARVESTINGDefaultCFF)
2150 
2151  # decide which HARVESTING paths to use
2152  harvestingList = sequence.split("+")
2153  from DQMOffline.Configuration.autoDQM import autoDQM
2154  from Validation.Configuration.autoValidation import autoValidation
2155  import copy
2156  combined_mapping = copy.deepcopy( autoDQM )
2157  combined_mapping.update( autoValidation )
2158  self.expandMapping(harvestingList,combined_mapping,index=-1)
2159 
2160  if len(set(harvestingList))!=len(harvestingList):
2161  harvestingList=list(OrderedSet(harvestingList))
2162  print("Duplicate entries for HARVESTING, using",harvestingList)
2163 
2164  for name in harvestingList:
2165  if not name in harvestingConfig.__dict__:
2166  print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2167  # trigger hard error, like for other sequence types
2168  getattr(self.process, name)
2169  continue
2170  harvestingstream = getattr(harvestingConfig,name)
2171  if isinstance(harvestingstream,cms.Path):
2172  self.schedule.append(harvestingstream)
2173  self.blacklist_paths.append(harvestingstream)
2174  if isinstance(harvestingstream,cms.Sequence):
2175  setattr(self.process,name+"_step",cms.Path(harvestingstream))
2176  self.schedule.append(getattr(self.process,name+"_step"))
2177 
2178  # # NOTE: the "hltProcess" option currently does nothing in the HARVEST step
2179  # if self._options.hltProcess or ('HLT' in self.stepMap):
2180  # pass
2181 
2182  self.scheduleSequence('DQMSaver','dqmsave_step')
2183  return
2184 
    def prepare_ALCAHARVEST(self, stepSpec = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        # NOTE(review): the default stepSpec=None would raise AttributeError at
        # the split() below — callers apparently always pass a spec; confirm.
        sequence=stepSpec.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")

        # expand autoPCL aliases (e.g. a workflow name standing for several paths)
        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        # walk every attribute of the harvesting config; schedule the cms.Path
        # entries that were requested, and register their DB payload/metadata
        # PSets with the output service via remembered exec statements
        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # VPSet payloads are extend()-ed, single PSets append()-ed
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                # tick off the handled entry; leftovers are reported below
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything still in the list was requested but not found in the config
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2217 
2218 
2219 
2220  def prepare_ENDJOB(self, stepSpec = 'endOfProcess'):
2221  _,_endjobSeq,_=self.loadDefaultOrSpecifiedCFF(stepSpec,self.ENDJOBDefaultCFF)
2222  self.scheduleSequenceAtEnd(_endjobSeq,'endjob_step')
2223  return
2224 
2225  def finalizeFastSimHLT(self):
2226  self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2227  self.schedule.append(self.process.reconstruction)
2228 
2229 
2230  def build_production_info(self, evt_type, evtnumber):
2231  """ Add useful info for the production. """
2232  self.process.configurationMetadata=cms.untracked.PSet\
2233  (version=cms.untracked.string("$Revision: 1.19 $"),
2234  name=cms.untracked.string("Applications"),
2235  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2236  )
2237 
2238  self.addedObjects.append(("Production Info","configurationMetadata"))
2239 
2240 
2241  def create_process(self):
2242  self.pythonCfgCode = "# Auto generated configuration file\n"
2243  self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
2244  self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
2245  self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
2246 
2247  # now set up the modifies
2248  modifiers=[]
2249  modifierStrings=[]
2250  modifierImports=[]
2251 
2252  if hasattr(self._options,"era") and self._options.era :
2253  # Multiple eras can be specified in a comma seperated list
2254  from Configuration.StandardSequences.Eras import eras
2255  for requestedEra in self._options.era.split(",") :
2256  modifierStrings.append(requestedEra)
2257  modifierImports.append(eras.pythonCfgLines[requestedEra])
2258  modifiers.append(getattr(eras,requestedEra))
2259 
2260 
2261  if hasattr(self._options,"procModifiers") and self._options.procModifiers:
2262  import importlib
2263  thingsImported=[]
2264  for c in self._options.procModifiers:
2265  thingsImported.extend(c.split(","))
2266  for pm in thingsImported:
2267  modifierStrings.append(pm)
2268  modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
2269  modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
2270 
2271  self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
2272  self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop
2273 
2274 
2275  if len(modifierStrings)>0:
2276  self.pythonCfgCode+= ','+','.join(modifierStrings)
2277  self.pythonCfgCode+=')\n\n'
2278 
2279  #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
2280  #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
2281  if self.process == None:
2282  if len(modifiers)>0:
2283  self.process = cms.Process(self._options.name,*modifiers)
2284  else:
2285  self.process = cms.Process(self._options.name)
2286 
2287 
2288 
2289 
2290  def prepare(self, doChecking = False):
2291  """ Prepare the configuration string and add missing pieces."""
2292 
2293  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2294  self.addMaxEvents()
2295  if self.with_input:
2296  self.addSource()
2297  self.addStandardSequences()
2298 
2299  self.completeInputCommand()
2300  self.addConditions()
2301 
2302 
2303  outputModuleCfgCode=""
2304  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2305  outputModuleCfgCode=self.addOutput()
2306 
2307  self.addCommon()
2308 
2309  self.pythonCfgCode += "# import of standard configurations\n"
2310  for module in self.imports:
2311  self.pythonCfgCode += ("process.load('"+module+"')\n")
2312 
2313  # production info
2314  if not hasattr(self.process,"configurationMetadata"):
2315  self.build_production_info(self._options.evt_type, self._options.number)
2316  else:
2317  #the PSet was added via a load
2318  self.addedObjects.append(("Production Info","configurationMetadata"))
2319 
2320  self.pythonCfgCode +="\n"
2321  for comment,object in self.addedObjects:
2322  if comment!="":
2323  self.pythonCfgCode += "\n# "+comment+"\n"
2324  self.pythonCfgCode += dumpPython(self.process,object)
2325 
2326  # dump the output definition
2327  self.pythonCfgCode += "\n# Output definition\n"
2328  self.pythonCfgCode += outputModuleCfgCode
2329 
2330  # dump all additional outputs (e.g. alca or skim streams)
2331  self.pythonCfgCode += "\n# Additional output definition\n"
2332  #I do not understand why the keys are not normally ordered.
2333  nl=sorted(self.additionalOutputs.keys())
2334  for name in nl:
2335  output = self.additionalOutputs[name]
2336  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2337  tmpOut = cms.EndPath(output)
2338  setattr(self.process,name+'OutPath',tmpOut)
2339  self.schedule.append(tmpOut)
2340 
2341  # dump all additional commands
2342  self.pythonCfgCode += "\n# Other statements\n"
2343  for command in self.additionalCommands:
2344  self.pythonCfgCode += command + "\n"
2345 
2346  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2347  for object in self._options.inlineObjects.split(','):
2348  if not object:
2349  continue
2350  if not hasattr(self.process,object):
2351  print('cannot inline -'+object+'- : not known')
2352  else:
2353  self.pythonCfgCode +='\n'
2354  self.pythonCfgCode +=dumpPython(self.process,object)
2355 
2356  if self._options.pileup=='HiMixEmbGEN':
2357  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2358 
2359  # dump all paths
2360  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2361  for path in self.process.paths:
2362  if getattr(self.process,path) not in self.blacklist_paths:
2363  self.pythonCfgCode += dumpPython(self.process,path)
2364 
2365  for endpath in self.process.endpaths:
2366  if getattr(self.process,endpath) not in self.blacklist_paths:
2367  self.pythonCfgCode += dumpPython(self.process,endpath)
2368 
2369  # dump the schedule
2370  self.pythonCfgCode += "\n# Schedule definition\n"
2371 
2372  # handling of the schedule
2373  pathNames = ['process.'+p.label_() for p in self.schedule]
2374  if self.process.schedule == None:
2375  self.process.schedule = cms.Schedule()
2376  for item in self.schedule:
2377  self.process.schedule.append(item)
2378  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2379  else:
2380  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2381  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2382 
2383  for index, item in enumerate(self.schedule):
2384  if index < self.scheduleIndexOfFirstHLTPath:
2385  self.process.schedule.insert(index, item)
2386  else:
2387  self.process.schedule.append(item)
2388 
2389  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2390  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2391  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2392  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2393  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2394 
2395  self.pythonCfgCode += result
2396 
2397  for labelToAssociate in self.labelsToAssociate:
2398  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2399  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2400 
2401  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2403  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2404  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2405 
2406  overrideThreads = (self._options.nThreads != 1)
2407  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2408  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2409 
2410  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2411  self.pythonCfgCode +="\n"
2412  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2413  if overrideThreads:
2414  self.pythonCfgCode +="process.options.numberOfThreads = {}\n".format(self._options.nThreads)
2415  self.pythonCfgCode +="process.options.numberOfStreams = {}\n".format(self._options.nStreams)
2416  self.process.options.numberOfThreads = self._options.nThreads
2417  self.process.options.numberOfStreams = self._options.nStreams
2418  if overrideConcurrentLumis:
2419  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = {}\n".format(self._options.nConcurrentLumis)
2420  self.process.options.numberOfConcurrentLuminosityBlocks = self._options.nConcurrentLumis
2421  if overrideConcurrentIOVs:
2422  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = {}\n".format(self._options.nConcurrentIOVs)
2423  self.process.options.eventSetup.numberOfConcurrentIOVs = self._options.nConcurrentIOVs
2424 
2425  if self._options.accelerators is not None:
2426  accelerators = self._options.accelerators.split(',')
2427  self.pythonCfgCode += "\n"
2428  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2429  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2430  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2431  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2432  self.process.options.accelerators = accelerators
2433 
2434  #repacked version
2435  if self._options.isRepacked:
2436  self.pythonCfgCode +="\n"
2437  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2438  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2439  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2440 
2441  # special treatment in case of production filter sequence 2/2
2442  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2443  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2444  self.pythonCfgCode +='for path in process.paths:\n'
2445  if len(self.conditionalPaths):
2446  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2447  if len(self.excludedPaths):
2448  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2449  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2450  pfs = getattr(self.process,self.productionFilterSequence)
2451  for path in self.process.paths:
2452  if not path in self.conditionalPaths: continue
2453  if path in self.excludedPaths: continue
2454  getattr(self.process,path).insert(0, pfs)
2455 
2456 
2457  # dump customise fragment
2458  self.pythonCfgCode += self.addCustomise()
2459 
2460  if self._options.runUnscheduled:
2461  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2462  # Keep the "unscheduled customise functions" separate for now,
2463  # there are customize functions given by users (in our unit
2464  # tests) that need to be run before the "unscheduled customise
2465  # functions"
2466  self.pythonCfgCode += self.addCustomise(1)
2467 
2468  self.pythonCfgCode += self.addCustomiseCmdLine()
2469 
2470  if hasattr(self.process,"logErrorHarvester"):
2471  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2472  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2473  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2474  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2475  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2477 
2478  # Temporary hack to put the early delete customization after
2479  # everything else
2480  #
2481  # FIXME: remove when no longer needed
2482  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2483  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2484  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2485  self.pythonCfgCode += "# End adding early deletion\n"
2486  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2487  self.process = customiseEarlyDelete(self.process)
2488 
2489  imports = cms.specialImportRegistry.getSpecialImports()
2490  if len(imports) > 0:
2491  #need to inject this at the top
2492  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2493  #now find the end of line
2494  index = self.pythonCfgCode.find("\n",index)
2495  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2496 
2497 
2498  # make the .io file
2499 
2500  if self._options.io:
2501  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2502  if not self._options.io.endswith('.io'): self._option.io+='.io'
2503  io=open(self._options.io,'w')
2504  ioJson={}
2505  if hasattr(self.process.source,"fileNames"):
2506  if len(self.process.source.fileNames.value()):
2507  ioJson['primary']=self.process.source.fileNames.value()
2508  if hasattr(self.process.source,"secondaryFileNames"):
2509  if len(self.process.source.secondaryFileNames.value()):
2510  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2511  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2512  ioJson['pileup']=self._options.pileup_input[4:]
2513  for (o,om) in self.process.outputModules_().items():
2514  ioJson[o]=om.fileName.value()
2515  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2516  if self.productionFilterSequence:
2517  ioJson['filter']=self.productionFilterSequence
2518  import json
2519  io.write(json.dumps(ioJson))
2520  return
2521 
2522 
def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
ALPAKA_FN_HOST_ACC ALPAKA_FN_INLINE constexpr float zip(ConstView const &tracks, int32_t i)
Definition: TracksSoA.h:90
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
Definition: autoNANO.py:1
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_L1P2GT(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
assert(be >=bs)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:6
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def defineMixing(dict)
Definition: Mixing.py:209
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
nextScheduleIsConditional
put the filtering path in the schedule