CMS 3D CMS Logo

ConfigBuilder.py
Go to the documentation of this file.
1 #! /usr/bin/env python3
2 
3 from __future__ import print_function
4 __version__ = "$Revision: 1.19 $"
5 __source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"
6 
7 import FWCore.ParameterSet.Config as cms
8 from FWCore.ParameterSet.Modules import _Module
9 # The following import is provided for backward compatibility reasons.
10 # The function used to be defined in this file.
11 from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
12 
13 import hashlib
14 import sys
15 import re
16 import collections
17 from subprocess import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes as DictTypes
class Options:
    """Plain attribute bag: holds the cmsDriver option values consumed by ConfigBuilder."""
    pass
21 
# the canonical defaults
# One attribute per cmsDriver command-line option; ConfigBuilder reads these
# as the fallback values when an option is not supplied.
defaultOptions = Options()
# sample type, steps and pile-up
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# geometry, magnetic field and conditions
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# event counts and job identification
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# input sources (files, DAS queries, secondary files)
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# user customisation hooks
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# input/output locations and formats
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False
# NOTE(review): import kept in its original mid-block position — confirm there
# is no ordering dependency before regrouping it with the top-of-file imports.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
# output definition and event-content filtering
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# run-dependent MC configuration
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
# framework threading / concurrency knobs (kept as strings by the option parser)
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
93 
94 # some helper routines
def dumpPython(process,name):
    """Return a python snippet that re-creates attribute `name` of `process`.

    Paths, EndPaths and Sequences are dumped without a trailing newline
    (historical formatting of the generated config); every other object
    gets one.  The original code had two token-identical `elif`/`else`
    branches; they are merged here without behavior change.
    """
    theObject = getattr(process,name)
    if isinstance(theObject,(cms.Path,cms.EndPath,cms.Sequence)):
        return "process."+name+" = " + theObject.dumpPython()
    # _Module, ESProducer and anything else: same dump plus a newline
    return "process."+name+" = " + theObject.dumpPython()+"\n"
def filesFromList(fileName,s=None):
    """Parse a text file listing input ROOT files.

    Each line holds either one ROOT file name (primary only) or two
    (primary + its parent/secondary file).  Duplicates are removed while
    preserving order.  If `s` (a cms.Source-like object) is given, its
    fileNames/secondaryFileNames are filled in place.

    Returns (primaryFiles, secondaryFiles).
    Raises Exception when no primary file is found.
    """
    prim=[]
    sec=[]
    # `with` guarantees the handle is closed (the original leaked it)
    with open(fileName) as listFile:
        for line in listFile:
            if line.count(".root")>=2:
                #two files solution...
                entries=line.replace("\n","").split()
                prim.append(entries[0])
                sec.append(entries[1])
            elif (line.find(".root")!=-1):
                entry=line.replace("\n","")
                prim.append(entry)
    # remove any duplicates but keep the order
    file_seen = set()
    prim = [f for f in prim if not (f in file_seen or file_seen.add(f))]
    file_seen = set()
    sec = [f for f in sec if not (f in file_seen or file_seen.add(f))]
    if s:
        # cms only needed when filling a source in place: keep the import lazy
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(prim)==0:
        raise Exception("There are not files in input from the file list")
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
138 
def filesFromDASQuery(query,option="",s=None):
    """Run `query` through dasgoclient and collect the ROOT files it returns.

    Retries up to 3 times (sleeping 100s between attempts) if dasgoclient
    exits non-zero.  Lines with two .root entries are split into
    primary + secondary file.  If `s` (a cms.Source-like object) is given,
    its fileNames/secondaryFileNames are filled in place.

    Returns (primaryFiles, secondaryFiles), both sorted and de-duplicated.
    """
    import os,time
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        # SECURITY NOTE: option/query are interpolated into a shell command
        # (shell=True) — only call this with trusted input.
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution...
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove any duplicates (sorted(set(...)) already yields a list)
    prim = sorted(set(prim))
    sec = sorted(set(sec))
    if s:
        # cms only needed when filling a source in place: keep the import lazy
        import FWCore.ParameterSet.Config as cms
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
186 
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key of `listOfKeys` found in `dict`.

    The matching key is removed from `dict`, so the caller can detect
    leftover (unused) keys afterwards.  When no key matches, `opt` is
    returned if given, otherwise an Exception is raised.

    Note: the second parameter shadows the builtin `dict`; the name is
    kept because callers may pass it by keyword.
    """
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    # identity comparison is the correct idiom for None (was `opt!=None`)
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
197 
199  """The main building routines """
200 
201  def __init__(self, options, process = None, with_output = False, with_input = False ):
202  """options taken from old cmsDriver and optparse """
203 
204  options.outfile_name = options.dirout+options.fileout
205 
206  self._options = options
207 
208  if self._options.isData and options.isMC:
209  raise Exception("ERROR: You may specify only --data or --mc, not both")
210  #if not self._options.conditions:
211  # raise Exception("ERROR: No conditions given!\nPlease specify conditions. E.g. via --conditions=IDEAL_30X::All")
212 
213  # check that MEtoEDMConverter (running in ENDJOB) and DQMIO don't run in the same job
214  if 'ENDJOB' in self._options.step:
215  if (hasattr(self._options,"outputDefinition") and \
216  self._options.outputDefinition != '' and \
217  any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
218  (hasattr(self._options,"datatier") and \
219  self._options.datatier and \
220  'DQMIO' in self._options.datatier):
221  print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
222  self._options.step=self._options.step.replace(',ENDJOB','')
223 
224 
225 
226  # what steps are provided by this class?
227  stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
228  self.stepMap={}
229  self.stepKeys=[]
230  for step in self._options.step.split(","):
231  if step=='': continue
232  stepParts = step.split(":")
233  stepName = stepParts[0]
234  if stepName not in stepList and not stepName.startswith('re'):
235  raise ValueError("Step "+stepName+" unknown")
236  if len(stepParts)==1:
237  self.stepMap[stepName]=""
238  elif len(stepParts)==2:
239  self.stepMap[stepName]=stepParts[1].split('+')
240  elif len(stepParts)==3:
241  self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
242  else:
243  raise ValueError("Step definition "+step+" invalid")
244  self.stepKeys.append(stepName)
245 
246  #print "map of steps is:",self.stepMap
247 
248  self.with_output = with_output
249  self.process=process
250 
251  if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
252  self.with_output = False
253  self.with_input = with_input
254  self.imports = []
255  self.create_process()
256  self.define_Configs()
257  self.schedule = list()
259 
260  # we are doing three things here:
261  # creating a process to catch errors
262  # building the code to re-create the process
263 
265  # TODO: maybe a list of to be dumped objects would help as well
266  self.blacklist_paths = []
267  self.addedObjects = []
269 
275 
276  def profileOptions(self):
277  """
278  addIgProfService
279  Function to add the igprof profile service so that you can dump in the middle
280  of the run.
281  """
282  profileOpts = self._options.profile.split(':')
283  profilerStart = 1
284  profilerInterval = 100
285  profilerFormat = None
286  profilerJobFormat = None
287 
288  if len(profileOpts):
289  #type, given as first argument is unused here
290  profileOpts.pop(0)
291  if len(profileOpts):
292  startEvent = profileOpts.pop(0)
293  if not startEvent.isdigit():
294  raise Exception("%s is not a number" % startEvent)
295  profilerStart = int(startEvent)
296  if len(profileOpts):
297  eventInterval = profileOpts.pop(0)
298  if not eventInterval.isdigit():
299  raise Exception("%s is not a number" % eventInterval)
300  profilerInterval = int(eventInterval)
301  if len(profileOpts):
302  profilerFormat = profileOpts.pop(0)
303 
304 
305  if not profilerFormat:
306  profilerFormat = "%s___%s___%%I.gz" % (
307  self._options.evt_type.replace("_cfi", ""),
308  hashlib.md5(
309  (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
310  str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
311  ).hexdigest()
312  )
313  if not profilerJobFormat and profilerFormat.endswith(".gz"):
314  profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
315  elif not profilerJobFormat:
316  profilerJobFormat = profilerFormat + "_EndOfJob.gz"
317 
318  return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
319 
320  def load(self,includeFile):
321  includeFile = includeFile.replace('/','.')
322  self.process.load(includeFile)
323  return sys.modules[includeFile]
324 
325  def loadAndRemember(self, includeFile):
326  """helper routine to load am memorize imports"""
327  # we could make the imports a on-the-fly data method of the process instance itself
328  # not sure if the latter is a good idea
329  includeFile = includeFile.replace('/','.')
330  self.imports.append(includeFile)
331  self.process.load(includeFile)
332  return sys.modules[includeFile]
333 
334  def executeAndRemember(self, command):
335  """helper routine to remember replace statements"""
336  self.additionalCommands.append(command)
337  if not command.strip().startswith("#"):
338  # substitute: process.foo = process.bar -> self.process.foo = self.process.bar
339  import re
340  exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
341  #exec(command.replace("process.","self.process."))
342 
343  def addCommon(self):
344  if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
345  self.process.options.Rethrow = ['ProductNotFound']
346  self.process.options.fileMode = 'FULLMERGE'
347 
348  self.addedObjects.append(("","options"))
349 
350  if self._options.lazy_download:
351  self.process.AdaptorConfig = cms.Service("AdaptorConfig",
352  stats = cms.untracked.bool(True),
353  enable = cms.untracked.bool(True),
354  cacheHint = cms.untracked.string("lazy-download"),
355  readHint = cms.untracked.string("read-ahead-buffered")
356  )
357  self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
358 
359  #self.process.cmsDriverCommand = cms.untracked.PSet( command=cms.untracked.string('cmsDriver.py '+self._options.arguments) )
360  #self.addedObjects.append(("what cmsDriver command was used","cmsDriverCommand"))
361 
362  if self._options.profile:
363  (start, interval, eventFormat, jobFormat)=self.profileOptions()
364  self.process.IgProfService = cms.Service("IgProfService",
365  reportFirstEvent = cms.untracked.int32(start),
366  reportEventInterval = cms.untracked.int32(interval),
367  reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
368  reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
369  self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
370 
371  def addMaxEvents(self):
372  """Here we decide how many evts will be processed"""
373  self.process.maxEvents.input = int(self._options.number)
374  if self._options.number_out:
375  self.process.maxEvents.output = int(self._options.number_out)
376  self.addedObjects.append(("","maxEvents"))
377 
    def addSource(self):
        """Here the source is built. Priority: file, generator.

        Configures process.source from --filein / --dasquery / --filetype,
        then applies input commands, lumi masks and (for MC) run-dependent
        run-number assignment.
        """
        self.addedObjects.append(("Input source","source"))

        # helper: fill process.source from --filein / --secondfilein entries;
        # each entry is a plain file name, "filelist:<file>" or "das:/dbs:<dataset>"
        def filesFromOption(self):
            for entry in self._options.filein.split(','):
                print("entry",entry)
                if entry.startswith("filelist:"):
                    filesFromList(entry[9:],self.process.source)
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
                else:
                    self.process.source.fileNames.append(self._options.dirin+entry)
            if self._options.secondfilein:
                if not hasattr(self.process.source,"secondaryFileNames"):
                    raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
                for entry in self._options.secondfilein.split(','):
                    print("entry",entry)
                    if entry.startswith("filelist:"):
                        self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                    elif entry.startswith("dbs:") or entry.startswith("das:"):
                        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                    else:
                        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        # build the source according to the requested input file type
        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print('LHE input from article ',article)
                    location='/store/lhe/'
                    import os
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                        #check first if list of LHE files is loaded (not empty)
                        if len(line)<2:
                            print('Issue to load LHE files, please check and try again.')
                            sys.exit(-1)
                    #Additional check to protect empty fileNames in process.source
                    if len(self.process.source.fileNames)==0:
                        print('Issue with empty filename, but can pass line check')
                        sys.exit(-1)
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)

            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)

            elif self._options.filetype == "DQMDAQ":
                # FIXME: how to configure it if there are no input files specified?
                self.process.source=cms.Source("DQMStreamerReader")


            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        # for GEN jobs on EDM input, drop any LHE XML product coming in
        if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        # generator-only jobs with no input get an EmptySource
        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            # NOTE(review): the direct call
            # ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source, self.runsAndWeights)
            # appears to be missing from this copy of the file — confirm against upstream.
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))

        # modify source in case of run-dependent MC (Run-3 method)
        # NOTE(review): the initialization `self.runsAndWeightsInt = None` appears to be
        # missing from this copy — the attribute is read unconditionally below; confirm upstream.
        if self._options.runsAndWeightsForMCIntegerWeights or self._options.runsScenarioForMCIntegerWeights:
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
            if self._options.runsAndWeightsForMCIntegerWeights:
                self.runsAndWeightsInt = eval(self._options.runsAndWeightsForMCIntegerWeights)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
                    self.runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
                else:
                    self.runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]

        if self.runsAndWeightsInt:
            if not self._options.relval:
                raise Exception("--relval option required when using --runsAndWeightsInt")
            if 'DATAMIX' in self._options.step:
                from SimGeneral.Configuration.LumiToRun import lumi_to_run
                total_events, events_per_job = self._options.relval.split(',')
                lumi_to_run_mapping = lumi_to_run(self.runsAndWeightsInt, int(total_events), int(events_per_job))
                self.additionalCommands.append("process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " + str(lumi_to_run_mapping) + "])")

        return
529 
    def addOutput(self):
        """ Add output module to the process.

        Two configuration paths:
          * --output: a python list of dicts (one per output module), parsed
            via anyOf() so leftover keys can be reported;
          * legacy --eventcontent/--datatier comma-separated lists.
        Returns the python source of the created output modules as a string.
        """
        result=""
        if self._options.outputDefinition:
            if self._options.datatier:
                print("--datatier & --eventcontent options ignored")

            #new output convention with a list of dict
            # WARNING: eval of a user-supplied option string (trusted input assumed)
            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")
                #requires option: tier
                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                #optional option: eventcontent, filtername, selectEvents, moduleLabel, filename
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')
                # module label has a particular role: derive one that is not
                # already taken on the process, from stream/tier/filter/selection
                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)
                # first output keeps the requested file name; later ones get a suffix
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                # anyOf() popped every recognized key; anything left is a typo
                if len(outDefDict):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")


                addAlCaSelects=False
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'
                    addAlCaSelects=True

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                             dataTier = cms.untracked.string(theTier),
                                             filterName = cms.untracked.string(theFilterName))
                                          )
                # default event selection: generation/filtering step, unless overridden
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if addAlCaSelects:
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)


                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                #print "creating output module ",theModuleLabel
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                # optionally keep the event content as a reference instead of inlining it
                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()


            return result

        # legacy path: --eventcontent / --datatier comma-separated lists
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put in an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            # NANOEDM streams reuse the corresponding NANOAOD event content
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                                                   )
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                # NOTE(review): the call miniAOD_customizeOutput(output) appears to be
                # missing from this copy — the import above is otherwise unused; confirm upstream.

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
710 
    def addStandardSequences(self):
        """
        Add selected standard sequences to the process:
        pile-up mixing, geometry, magnetic field, and then one prepare_<STEP>
        call per requested step; finally the RandomNumberGeneratorService
        restore-state hooks when --restoreRNDSeeds is used.
        """
        # load the pile up file
        if self._options.pileup:
            # the spec is '<scenario>[,<python-dict-of-overrides>]'
            pileupSpec=self._options.pileup.split(',')[0]

            # Does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # Put mixing parameters in a dictionary
            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                import copy
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                # NOTE: everything after the first comma is eval'ed as python
                # overrides -- trusted command-line input only
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # Load the pu cfg file corresponding to the requested pu scenario
            if 'file:' in pileupSpec:
                #the file is local
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-RAW
                if self._options.pileup_input:
                    # pileup input may come from DAS, a file list, or a comma-separated list
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                # every key should have been consumed by defineMixing
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())


        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    if self.geometryDBLabel:
                        # pick the requested geometry payload label from the DB sources
                        self.executeAndRemember('if hasattr(process, "XMLFromDBSource"): process.XMLFromDBSource.label="%s"'%(self.geometryDBLabel))
                        self.executeAndRemember('if hasattr(process, "DDDetectorESProducerFromDB"): process.DDDetectorESProducerFromDB.label="%s"'%(self.geometryDBLabel))

        except ImportError:
            print("Geometry option",self._options.geometry,"unknown.")
            raise

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        # dispatch each requested step to its prepare_<STEP> method
        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print("Step:", stepName,"Spec:",stepSpec)
            if stepName.startswith('re'):
                # 're'-prefixed steps re-run on existing input; keep the previous
                # products in the input event content unless told to drop them
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            #it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                # make sure the saved engine state is kept on input
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'
807 
808  def completeInputCommand(self):
809  if self._options.inputEventContent:
810  import copy
811  def dropSecondDropStar(iec):
812  #drop occurence of 'drop *' in the list
813  count=0
814  for item in iec:
815  if item=='drop *':
816  if count!=0:
817  iec.remove(item)
818  count+=1
819 
820 
821  if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
822  for evct in self._options.inputEventContent.split(','):
823  if evct=='': continue
824  theEventContent = getattr(self.process, evct+"EventContent")
825  if hasattr(theEventContent,'outputCommands'):
826  self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
827  if hasattr(theEventContent,'inputCommands'):
828  self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
829 
830  dropSecondDropStar(self.process.source.inputCommands)
831 
832  if not self._options.dropDescendant:
833  self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
834 
835 
836  return
837 
838  def addConditions(self):
839  """Add conditions to the process"""
840  if not self._options.conditions: return
841 
842  if 'FrontierConditions_GlobalTag' in self._options.conditions:
843  print('using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
844  self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')
845 
847  from Configuration.AlCa.GlobalTag import GlobalTag
848  self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
849  self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
850  self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
851 
852 
853  def addCustomise(self,unsch=0):
854  """Include the customise code """
855 
856  custOpt=[]
857  if unsch==0:
858  for c in self._options.customisation_file:
859  custOpt.extend(c.split(","))
860  else:
861  for c in self._options.customisation_file_unsch:
862  custOpt.extend(c.split(","))
863 
864  custMap=DictTypes.SortedKeysDict()
865  for opt in custOpt:
866  if opt=='': continue
867  if opt.count('.')>1:
868  raise Exception("more than . in the specification:"+opt)
869  fileName=opt.split('.')[0]
870  if opt.count('.')==0: rest='customise'
871  else:
872  rest=opt.split('.')[1]
873  if rest=='py': rest='customise' #catch the case of --customise file.py
874 
875  if fileName in custMap:
876  custMap[fileName].extend(rest.split('+'))
877  else:
878  custMap[fileName]=rest.split('+')
879 
880  if len(custMap)==0:
881  final_snippet='\n'
882  else:
883  final_snippet='\n# customisation of the process.\n'
884 
885  allFcn=[]
886  for opt in custMap:
887  allFcn.extend(custMap[opt])
888  for fcn in allFcn:
889  if allFcn.count(fcn)!=1:
890  raise Exception("cannot specify twice "+fcn+" as a customisation method")
891 
892  for f in custMap:
893  # let python search for that package and do syntax checking at the same time
894  packageName = f.replace(".py","").replace("/",".")
895  __import__(packageName)
896  package = sys.modules[packageName]
897 
898  # now ask the package for its definition and pick .py instead of .pyc
899  customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)
900 
901  final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
902  if self._options.inline_custom:
903  for line in file(customiseFile,'r'):
904  if "import FWCore.ParameterSet.Config" in line:
905  continue
906  final_snippet += line
907  else:
908  final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
909  for fcn in custMap[f]:
910  print("customising the process with",fcn,"from",f)
911  if not hasattr(package,fcn):
912  #bound to fail at run time
913  raise Exception("config "+f+" has no function "+fcn)
914  #execute the command
915  self.process=getattr(package,fcn)(self.process)
916  #and print it in the configuration
917  final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
918  final_snippet += "\nprocess = %s(process)\n"%(fcn,)
919 
920  if len(custMap)!=0:
921  final_snippet += '\n# End of customisation functions\n'
922 
923 
924  return final_snippet
925 
926  def addCustomiseCmdLine(self):
927  final_snippet='\n# Customisation from command line\n'
928  if self._options.customise_commands:
929  import string
930  for com in self._options.customise_commands.split('\\n'):
931  com=com.lstrip()
932  self.executeAndRemember(com)
933  final_snippet +='\n'+com
934 
935  return final_snippet
936 
937  #----------------------------------------------------------------------------
938  # here the methods to define the python includes for each step or
939  # conditions
940  #----------------------------------------------------------------------------
941  def define_Configs(self):
942  if len(self.stepMap):
943  self.loadAndRemember('Configuration/StandardSequences/Services_cff')
944  if self._options.particleTable not in defaultOptions.particleTableList:
945  print('Invalid particle table provided. Options are:')
946  print(defaultOptions.particleTable)
947  sys.exit(-1)
948  else:
949  if len(self.stepMap):
950  self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
951 
952  self.loadAndRemember('FWCore/MessageService/MessageLogger_cfi')
953 
954  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
955  self.GENDefaultCFF="Configuration/StandardSequences/Generator_cff"
956  self.SIMDefaultCFF="Configuration/StandardSequences/Sim_cff"
957  self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_cff"
958  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRaw_cff"
959  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1Emulator_cff'
960  self.L1MENUDefaultCFF="Configuration/StandardSequences/L1TriggerDefaultMenu_cff"
961  self.HLTDefaultCFF="Configuration/StandardSequences/HLTtable_cff"
962  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_Data_cff"
963  if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"
964  self.L1RecoDefaultCFF="Configuration/StandardSequences/L1Reco_cff"
965  self.L1TrackTriggerDefaultCFF="Configuration/StandardSequences/L1TrackTrigger_cff"
966  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_Data_cff"
967  self.RECOSIMDefaultCFF="Configuration/StandardSequences/RecoSim_cff"
968  self.PATDefaultCFF="Configuration/StandardSequences/PAT_cff"
969  self.NANODefaultCFF="PhysicsTools/NanoAOD/nano_cff"
970  self.NANOGENDefaultCFF="PhysicsTools/NanoAOD/nanogen_cff"
971  self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
972  self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
973  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
974  self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
975  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
976  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
977  self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
978  self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
979  self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
980  self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
981  self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"
983  if "DATAMIX" in self.stepMap.keys():
984  self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
985  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDM_cff"
986  self.DIGI2RAWDefaultCFF="Configuration/StandardSequences/DigiToRawDM_cff"
987  self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
988 
989  self.ALCADefaultSeq=None
990  self.LHEDefaultSeq='externalLHEProducer'
991  self.GENDefaultSeq='pgen'
992  self.SIMDefaultSeq='psim'
993  self.DIGIDefaultSeq='pdigi'
995  self.DIGI2RAWDefaultSeq='DigiToRaw'
996  self.HLTDefaultSeq='GRun'
997  self.L1DefaultSeq=None
1002  self.RAW2DIGIDefaultSeq='RawToDigi'
1003  self.L1RecoDefaultSeq='L1Reco'
1004  self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
1005  if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
1006  self.RECODefaultSeq='reconstruction'
1007  else:
1008  self.RECODefaultSeq='reconstruction_fromRECO'
1009  self.RECOSIMDefaultSeq='recosim'
1011  self.L1HwValDefaultSeq='L1HwVal'
1012  self.DQMDefaultSeq='DQMOffline'
1014  self.ENDJOBDefaultSeq='endOfProcess'
1015  self.REPACKDefaultSeq='DigiToRawRepack'
1016  self.PATDefaultSeq='miniAOD'
1017  self.PATGENDefaultSeq='miniGEN'
1018  #TODO: Check based of file input
1019  self.NANOGENDefaultSeq='nanogenSequence'
1020  self.NANODefaultSeq='nanoSequence'
1022  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"
1024  if not self._options.beamspot:
1025  self._options.beamspot=VtxSmearedDefaultKey
1026 
1027  # if its MC then change the raw2digi
1028  if self._options.isMC==True:
1029  self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_cff"
1030  self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
1031  self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
1032  self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
1033  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
1034  self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
1035  self.NANODefaultSeq='nanoSequenceMC'
1036  else:
1037  self._options.beamspot = None
1038 
1039  #patch for gen, due to backward incompatibility
1040  if 'reGEN' in self.stepMap:
1041  self.GENDefaultSeq='fixGenInfo'
1042 
1043  if self._options.scenario=='cosmics':
1044  self._options.pileup='Cosmics'
1045  self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
1046  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
1047  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
1048  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
1049  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
1050  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
1051  if self._options.isMC==True:
1052  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
1053  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
1054  self.RECODefaultSeq='reconstructionCosmics'
1055  self.DQMDefaultSeq='DQMOfflineCosmics'
1056 
1057  if self._options.scenario=='HeavyIons':
1058  if not self._options.beamspot:
1059  self._options.beamspot=VtxSmearedHIDefaultKey
1060  self.HLTDefaultSeq = 'HIon'
1061  self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
1062  self.VALIDATIONDefaultSeq=''
1063  self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
1064  self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
1065  self.RECODefaultSeq='reconstructionHeavyIons'
1066  self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
1067  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
1068  self.DQMDefaultSeq='DQMOfflineHeavyIons'
1069  self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
1070  self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
1071  if self._options.isMC==True:
1072  self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
1073 
1074 
1077  self.USERDefaultSeq='user'
1078  self.USERDefaultCFF=None
1080  # the magnetic field
1081  self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
1082  self.magFieldCFF = self.magFieldCFF.replace("__",'_')
1083 
1084  # the geometry
1085  self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
1087  simGeometry=''
1088  if self._options.fast:
1089  if 'start' in self._options.conditions.lower():
1090  self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
1091  else:
1092  self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
1093  else:
1094  def inGeometryKeys(opt):
1095  from Configuration.StandardSequences.GeometryConf import GeometryConf
1096  if opt in GeometryConf:
1097  return GeometryConf[opt]
1098  else:
1099  return opt
1100 
1101  geoms=self._options.geometry.split(',')
1102  if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
1103  if len(geoms)==2:
1104  #may specify the reco geometry
1105  if '/' in geoms[1] or '_cff' in geoms[1]:
1106  self.GeometryCFF=geoms[1]
1107  else:
1108  self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
1109 
1110  if (geoms[0].startswith('DB:')):
1111  self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
1112  self.geometryDBLabel=geoms[0][3:]
1113  print("with DB:")
1114  else:
1115  if '/' in geoms[0] or '_cff' in geoms[0]:
1116  self.SimGeometryCFF=geoms[0]
1117  else:
1118  simGeometry=geoms[0]
1119  if self._options.gflash==True:
1120  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
1121  else:
1122  self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'
1123 
1124  # synchronize the geometry configuration and the FullSimulation sequence to be used
1125  if simGeometry not in defaultOptions.geometryExtendedOptions:
1126  self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
1127 
1128  if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
1129  self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
1130  self._options.beamspot='NoSmear'
1131 
1132  # fastsim requires some changes to the default cff files and sequences
1133  if self._options.fast:
1134  self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
1135  self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
1136  self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
1137  self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
1138  self.NANODefaultSeq = 'nanoSequenceFS'
1139  self.DQMOFFLINEDefaultCFF="DQMOffline.Configuration.DQMOfflineFS_cff"
1140 
1141  # Mixing
1142  if self._options.pileup=='default':
1143  from Configuration.StandardSequences.Mixing import MixingDefaultKey
1144  self._options.pileup=MixingDefaultKey
1145 
1146 
1147  #not driven by a default cff anymore
1148  if self._options.isData:
1149  self._options.pileup=None
1150 
1151 
1154  # for alca, skims, etc
1155  def addExtraStream(self, name, stream, workflow='full'):
1156  # define output module and go from there
1157  output = cms.OutputModule("PoolOutputModule")
1158  if stream.selectEvents.parameters_().__len__()!=0:
1159  output.SelectEvents = stream.selectEvents
1160  else:
1161  output.SelectEvents = cms.untracked.PSet()
1162  output.SelectEvents.SelectEvents=cms.vstring()
1163  if isinstance(stream.paths,tuple):
1164  for path in stream.paths:
1165  output.SelectEvents.SelectEvents.append(path.label())
1166  else:
1167  output.SelectEvents.SelectEvents.append(stream.paths.label())
1168 
1169 
1170 
1171  if isinstance(stream.content,str):
1172  evtPset=getattr(self.process,stream.content)
1173  for p in evtPset.parameters_():
1174  setattr(output,p,getattr(evtPset,p))
1175  if not self._options.inlineEventContent:
1176  def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1177  return label
1178  output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
1179  else:
1180  output.outputCommands = stream.content
1181 
1182 
1183  output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')
1184 
1185  output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1186  filterName = cms.untracked.string(stream.name))
1187 
1188  if self._options.filtername:
1189  output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)
1190 
1191  #add an automatic flushing to limit memory consumption
1192  output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1193 
1194  if workflow in ("producers,full"):
1195  if isinstance(stream.paths,tuple):
1196  for path in stream.paths:
1197  self.schedule.append(path)
1198  else:
1199  self.schedule.append(stream.paths)
1200 
1201 
1202  # in case of relvals we don't want to have additional outputs
1203  if (not self._options.relval) and workflow in ("full","output"):
1204  self.additionalOutputs[name] = output
1205  setattr(self.process,name,output)
1206 
1207  if workflow == 'output':
1208  # adjust the select events to the proper trigger results from previous process
1209  filterList = output.SelectEvents.SelectEvents
1210  for i, filter in enumerate(filterList):
1211  filterList[i] = filter+":"+self._options.triggerResultsProcess
1212 
1213  return output
1214 
1215  #----------------------------------------------------------------------------
1216  # here the methods to create the steps. Of course we are doing magic here ;)
1217  # prepare_STEPNAME modifies self.process and what else's needed.
1218  #----------------------------------------------------------------------------
1219 
1220  def loadDefaultOrSpecifiedCFF(self, sequence,defaultCFF):
1221  if ( len(sequence.split('.'))==1 ):
1222  l=self.loadAndRemember(defaultCFF)
1223  elif ( len(sequence.split('.'))==2 ):
1224  l=self.loadAndRemember(sequence.split('.')[0])
1225  sequence=sequence.split('.')[1]
1226  else:
1227  print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1228  print(sequence,"not recognized")
1229  raise
1230  return l
1231 
    def scheduleSequence(self,seq,prefix,what='Path'):
        """Create cms Path(s) (or `what`, e.g. 'EndPath') named after `prefix`
        for the sequence specification `seq` and append them to the schedule.

        `seq` may combine process sequence names with '*' (all merged into a
        single path named `prefix`) or '+' (one path per name, named
        `prefix`0, `prefix`1, ...).
        """
        if '*' in seq:
            #create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    p=getattr(self.process,prefix)
                    tmp = getattr(self.process, s)
                    if isinstance(tmp, cms.Task):
                        # a Task cannot be added with '+='; it is associated to the path
                        p.associate(tmp)
                    else:
                        p+=tmp
            self.schedule.append(getattr(self.process,prefix))
            return
        else:
            #create as many path as many sequences
            if not '+' in seq:
                # NOTE(review): conditional-path bookkeeping only happens on this
                # single-sequence branch, not for '+'/'*' specs -- presumably
                # intentional; verify against users of nextScheduleIsConditional
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            else:
                for i,s in enumerate(seq.split('+')):
                    sn=prefix+'%d'%(i)
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
            return
1260 
1261  def scheduleSequenceAtEnd(self,seq,prefix):
1262  self.scheduleSequence(seq,prefix,what='EndPath')
1263  return
1264 
1265  def prepare_ALCAPRODUCER(self, sequence = None):
1266  self.prepare_ALCA(sequence, workflow = "producers")
1267 
1268  def prepare_ALCAOUTPUT(self, sequence = None):
1269  self.prepare_ALCA(sequence, workflow = "output")
1270 
    def prepare_ALCA(self, sequence = None, workflow = 'full'):
        """ Enrich the process with alca streams

        `sequence` is a '+'-separated list of AlCa stream names (with @-style
        aliases expanded from autoAlca); `workflow` is forwarded to
        addExtraStream. Raises if any requested stream cannot be found.
        """
        alcaConfig=self.loadDefaultOrSpecifiedCFF(sequence,self.ALCADefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which ALCA paths to use
        alcaList = sequence.split("+")
        maxLevel=0
        from Configuration.AlCa.autoAlca import autoAlca, AlCaNoConcurrentLumis
        # support @X from autoAlca.py, and recursion support: i.e T0:@Mu+@EG+...
        self.expandMapping(alcaList,autoAlca)
        self.AlCaPaths=[]
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                # some AlCa sequences cannot run with concurrent lumi blocks
                if shortName in AlCaNoConcurrentLumis:
                    print("Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".format(shortName))
                    self._options.nConcurrentLumis = "1"
                    self._options.nConcurrentIOVs = "1"
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    # keep the DQM MEtoEDM products on the stream output
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                #rename the HLT process name in the alca modules
                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:
                            self.renameHLTprocessInSequence(path.label())
                    else:
                        self.renameHLTprocessInSequence(alcastream.paths.label())

                # a stream may be requested several times; consume every mention
                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            # DQM needs a special handling
            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                #black list the alca path so that they do not appear in the cfg
                self.blacklist_paths.append(alcastream)


        # anything left in alcaList was not matched by the ALCA configuration
        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print("The following alcas could not be found "+str(alcaList))
            print("available ",available)
            #print "verify your configuration, ignoring for now"
            raise Exception("The following alcas could not be found "+str(alcaList))
1332 
1333  def prepare_LHE(self, sequence = None):
1334  #load the fragment
1335 
1336  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
1337  print("Loading lhe fragment from",loadFragment)
1338  __import__(loadFragment)
1339  self.process.load(loadFragment)
1340 
1341  self._options.inlineObjets+=','+sequence
1342 
1343  getattr(self.process,sequence).nEvents = int(self._options.number)
1344 
1345  #schedule it
1346  self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1347  self.excludedPaths.append("lhe_step")
1348  self.schedule.append( self.process.lhe_step )
1349 
1350  def prepare_GEN(self, sequence = None):
1351  """ load the fragment of generator configuration """
1352  loadFailure=False
1353  #remove trailing .py
1354  #support old style .cfi by changing into something.cfi into something_cfi
1355  #remove python/ from the name
1356  loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
1357  #standard location of fragments
1358  if not '/' in loadFragment:
1359  loadFragment='Configuration.Generator.'+loadFragment
1360  else:
1361  loadFragment=loadFragment.replace('/','.')
1362  try:
1363  print("Loading generator fragment from",loadFragment)
1364  __import__(loadFragment)
1365  except:
1366  loadFailure=True
1367  #if self.process.source and self.process.source.type_()=='EmptySource':
1368  if not (self._options.filein or self._options.dasquery):
1369  raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1370 
1371  if not loadFailure:
1372  from Configuration.Generator.concurrentLumisDisable import noConcurrentLumiGenerators
1373 
1374  generatorModule=sys.modules[loadFragment]
1375  genModules=generatorModule.__dict__
1376  #remove lhe producer module since this should have been
1377  #imported instead in the LHE step
1378  if self.LHEDefaultSeq in genModules:
1379  del genModules[self.LHEDefaultSeq]
1380 
1381  if self._options.hideGen:
1382  self.loadAndRemember(loadFragment)
1383  else:
1384  self.process.load(loadFragment)
1385  # expose the objects from that fragment to the configuration
1386  import FWCore.ParameterSet.Modules as cmstypes
1387  for name in genModules:
1388  theObject = getattr(generatorModule,name)
1389  if isinstance(theObject, cmstypes._Module):
1390  self._options.inlineObjets=name+','+self._options.inlineObjets
1391  if theObject.type_() in noConcurrentLumiGenerators:
1392  print("Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".format(theObject.type_()))
1393  self._options.nConcurrentLumis = "1"
1394  self._options.nConcurrentIOVs = "1"
1395  elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
1396  self._options.inlineObjets+=','+name
1397 
1398  if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
1399  if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
1400  self.productionFilterSequence = 'ProductionFilterSequence'
1401  elif 'generator' in genModules:
1402  self.productionFilterSequence = 'generator'
1403 
1404  """ Enrich the schedule with the rest of the generation step """
1405  self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1406  genSeqName=sequence.split('.')[-1]
1407 
1408  if True:
1409  try:
1410  from Configuration.StandardSequences.VtxSmeared import VtxSmeared
1411  cffToBeLoaded=VtxSmeared[self._options.beamspot]
1412  self.loadAndRemember(cffToBeLoaded)
1413  except ImportError:
1414  raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")
1415 
1416  if self._options.scenario == 'HeavyIons':
1417  if self._options.pileup=='HiMixGEN':
1418  self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
1419  elif self._options.pileup=='HiMixEmbGEN':
1420  self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
1421  else:
1422  self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")
1423 
1424  self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1425  self.schedule.append(self.process.generation_step)
1426 
1427  #register to the genstepfilter the name of the path (static right now, but might evolve)
1428  self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1429 
1430  if 'reGEN' in self.stepMap or sequence == 'pgen_smear':
1431  #stop here
1432  return
1433 
1434  """ Enrich the schedule with the summary of the filter step """
1435  #the gen filter in the endpath
1436  self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
1437  self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
1438  return
1439 
1440  def prepare_SIM(self, sequence = None):
1441  """ Enrich the schedule with the simulation step"""
1442  self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1443  if not self._options.fast:
1444  if self._options.gflash==True:
1445  self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")
1446 
1447  if self._options.magField=='0T':
1448  self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
1449  else:
1450  if self._options.magField=='0T':
1451  self.executeAndRemember("process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1452 
1453  self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
1454  return
1455 
1456  def prepare_DIGI(self, sequence = None):
1457  """ Enrich the schedule with the digitisation step"""
1458  self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1459 
1460  if self._options.gflash==True:
1461  self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")
1462 
1463  if sequence == 'pdigi_valid' or sequence == 'pdigi_hi':
1464  self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1465 
1466  if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and sequence != 'pdigi_hi_nogen' and not self.process.source.type_()=='EmptySource' and not self._options.filetype == "LHE":
1467  if self._options.inputEventContent=='':
1468  self._options.inputEventContent='REGEN'
1469  else:
1470  self._options.inputEventContent=self._options.inputEventContent+',REGEN'
1471 
1472 
1473  self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
1474  return
1475 
1476  def prepare_CFWRITER(self, sequence = None):
1477  """ Enrich the schedule with the crossing frame writer step"""
1479  self.scheduleSequence('pcfw','cfwriter_step')
1480  return
1481 
    def prepare_DATAMIX(self, sequence = None):
        """ Enrich the schedule with the digitisation step"""
        # NOTE(review): upstream versions load the DATAMIX default cff before
        # scheduling; that line appears to be missing from this copy -- verify.
        self.scheduleSequence('pdatamix','datamixing_step')

        # optionally attach a pile-up input source to the data mixer; three
        # spellings are supported: a DAS/DBS query, a file list, or a
        # comma-separated list of file names
        if self._options.pileup_input:
            theFiles=''
            if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
            elif self._options.pileup_input.startswith("filelist:"):
                theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
            else:
                theFiles=self._options.pileup_input.split(',')
            #print theFiles
            # replay the file list assignment in the dumped configuration
            self.executeAndRemember( "process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )

        return
1499 
    def prepare_DIGI2RAW(self, sequence = None):
        """Enrich the schedule with the digi-to-raw conversion step."""
        # NOTE(review): upstream versions load the DIGI2RAW default cff first;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequence(sequence.split('.')[-1],'digi2raw_step')
        return
1504 
    def prepare_REPACK(self, sequence = None):
        """Enrich the schedule with the RAW repacking step."""
        # NOTE(review): upstream versions load the REPACK default cff first;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequence(sequence.split('.')[-1],'digi2repack_step')
        return
1509 
1510  def prepare_L1(self, sequence = None):
1511  """ Enrich the schedule with the L1 simulation step"""
1512  assert(sequence == None)
1513  self.loadAndRemember(self.L1EMDefaultCFF)
1514  self.scheduleSequence('SimL1Emulator','L1simulation_step')
1515  return
1516 
1517  def prepare_L1REPACK(self, sequence = None):
1518  """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1519  supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT','CalouGT']
1520  if sequence in supported:
1521  self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1522  if self._options.scenario == 'HeavyIons':
1523  self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
1524  self.scheduleSequence('SimL1Emulator','L1RePack_step')
1525  else:
1526  print("L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported)
1527  raise Exception('unsupported feature')
1528 
    def prepare_HLT(self, sequence = None):
        """ Enrich the schedule with the HLT simulation step"""
        if not sequence:
            print("no specification of the hlt menu has been given, should never happen")
            raise Exception('no HLT sequence provided')

        if '@' in sequence:
            # case where HLT:@something was provided
            from Configuration.HLT.autoHLT import autoHLT
            key = sequence[1:]
            if key in autoHLT:
                sequence = autoHLT[key]
            else:
                raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if ',' in sequence:
            #case where HLT:something:something was provided
            self.executeAndRemember('import HLTrigger.Configuration.Utilities')
            optionsForHLT = {}
            # the menu flavour follows the scenario: HIon for heavy ions, GRun otherwise
            if self._options.scenario == 'HeavyIons':
                optionsForHLT['type'] = 'HIon'
            else:
                optionsForHLT['type'] = 'GRun'
            # render the option dict as 'key=repr(value)' call arguments
            optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.items())
            if sequence == 'run,fromSource':
                # take the run number from whichever attribute the source provides
                if hasattr(self.process.source,'firstRun'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
                elif hasattr(self.process.source,'setRunNumber'):
                    self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
                else:
                    raise Exception('Cannot replace menu to load %s'%(sequence))
            else:
                self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))
        else:
            # plain menu name: load the corresponding frozen HLT configuration
            self.loadAndRemember('HLTrigger/Configuration/HLT_%s_cff' % sequence)

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            # rename the process both in the dumped cfg and in the live process
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            self.process = ProcessName(self.process)

        if self.process.schedule == None:
            raise Exception('the HLT step did not attach a valid schedule to the process')

        # remember where the HLT paths start so prepare() can interleave the
        # other steps before them; keep the menu's paths out of the dumped cfg
        self.scheduleIndexOfFirstHLTPath = len(self.schedule)
        [self.blacklist_paths.append(path) for path in self.process.schedule if isinstance(path,(cms.Path,cms.EndPath))]

        # this is a fake, to be removed with fastim migration and HLT menu dump
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1585 
1586 
1587  def prepare_RAW2RECO(self, sequence = None):
1588  if ','in sequence:
1589  seqReco=sequence.split(',')[1]
1590  seqDigi=sequence.split(',')[0]
1591  else:
1592  print("RAW2RECO requires two specifications",sequence,"insufficient")
1593 
1594  self.prepare_RAW2DIGI(seqDigi)
1595  self.prepare_RECO(seqReco)
1596  return
1597 
    def prepare_RAW2DIGI(self, sequence = "RawToDigi"):
        """Enrich the schedule with the raw-to-digi unpacking step."""
        # NOTE(review): upstream versions load the RAW2DIGI default cff first;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequence(sequence,'raw2digi_step')
        # if self._options.isRepacked:
        #self.renameInputTagsInSequence(sequence)
        return
1604 
1605  def prepare_PATFILTER(self, sequence=None):
1606  self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1607  from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
1608  for filt in allMetFilterPaths:
1609  self.schedule.append(getattr(self.process,'Flag_'+filt))
1610 
1611  def prepare_L1HwVal(self, sequence = 'L1HwVal'):
1612  ''' Enrich the schedule with L1 HW validation '''
1613  self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
1614  #self.scheduleSequence(sequence.split('.')[-1],'l1hwval_step')
1615  print('\n\n\n DEPRECATED this has no action \n\n\n')
1616  return
1617 
1618  def prepare_L1Reco(self, sequence = "L1Reco"):
1619  ''' Enrich the schedule with L1 reconstruction '''
1620  self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
1621  self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
1622  return
1623 
    def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
        ''' Enrich the schedule with L1 reconstruction '''
        # NOTE(review): upstream versions load the L1TrackTrigger default cff
        # first; that line appears to be missing from this copy -- verify.
        self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
        return
1629 
1630  def prepare_FILTER(self, sequence = None):
1631  ''' Enrich the schedule with a user defined filter sequence '''
1632 
1633  filterConfig=self.load(sequence.split('.')[0])
1634  filterSeq=sequence.split('.')[-1]
1635 
1636  class PrintAllModules(object):
1637  def __init__(self):
1638  self.inliner=''
1639  pass
1640  def enter(self,visitee):
1641  try:
1642  label=visitee.label()
1643 
1644  self.inliner=label+','+self.inliner
1645  except:
1646  pass
1647  def leave(self,v): pass
1648 
1649  expander=PrintAllModules()
1650  getattr(self.process,filterSeq).visit( expander )
1651  self._options.inlineObjets+=','+expander.inliner
1652  self._options.inlineObjets+=','+filterSeq
1653 
1654 
1655  self.scheduleSequence(filterSeq,'filtering_step')
1656  self.nextScheduleIsConditional=True
1657 
1658  self.productionFilterSequence = filterSeq
1659 
1660  return
1661 
1662  def prepare_RECO(self, sequence = "reconstruction"):
1663  ''' Enrich the schedule with reconstruction '''
1664  self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
1665  self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
1666  return
1667 
1668  def prepare_RECOSIM(self, sequence = "recosim"):
1669  ''' Enrich the schedule with reconstruction '''
1670  self.loadDefaultOrSpecifiedCFF(sequence,self.RECOSIMDefaultCFF)
1671  self.scheduleSequence(sequence.split('.')[-1],'recosim_step')
1672  return
1673 
    def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
            sys.exit()
        # NOTE(review): upstream versions load the RECOBEFMIX default cff here;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
        return
1682 
1683  def prepare_PAT(self, sequence = "miniAOD"):
1684  ''' Enrich the schedule with PAT '''
1685  self.prepare_PATFILTER(self)
1686  self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
1687  self.labelsToAssociate.append('patTask')
1688  if self._options.isData:
1689  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1690  else:
1691  if self._options.fast:
1692  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1693  else:
1694  self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1695 
1696  if self._options.hltProcess:
1697  if len(self._options.customise_commands) > 1:
1698  self._options.customise_commands = self._options.customise_commands + " \n"
1699  self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
1700  self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1701  self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1702 
1703 # self.renameHLTprocessInSequence(sequence)
1704 
1705  return
1706 
1707  def prepare_PATGEN(self, sequence = "miniGEN"):
1708  ''' Enrich the schedule with PATGEN '''
1709  self.loadDefaultOrSpecifiedCFF(sequence,self.PATGENDefaultCFF) #this is unscheduled
1710  self.labelsToAssociate.append('patGENTask')
1711  if self._options.isData:
1712  raise Exception("PATGEN step can only run on MC")
1713  return
1714 
1715  def prepare_NANO(self, sequence = "nanoAOD"):
1716  ''' Enrich the schedule with NANO '''
1717  self.loadDefaultOrSpecifiedCFF(sequence,self.NANODefaultCFF)
1718  self.scheduleSequence(sequence.split('.')[-1],'nanoAOD_step')
1719  custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
1720  self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
1721  if self._options.hltProcess:
1722  if len(self._options.customise_commands) > 1:
1723  self._options.customise_commands = self._options.customise_commands + " \n"
1724  self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
1725 
1726  def prepare_NANOGEN(self, sequence = "nanoAOD"):
1727  ''' Enrich the schedule with NANOGEN '''
1728  # TODO: Need to modify this based on the input file type
1729  fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
1730  self.loadDefaultOrSpecifiedCFF(sequence,self.NANOGENDefaultCFF)
1731  self.scheduleSequence(sequence.split('.')[-1],'nanoAOD_step')
1732  custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
1733  if self._options.runUnscheduled:
1734  self._options.customisation_file_unsch.insert(0, '.'.join([self.NANOGENDefaultCFF, custom]))
1735  else:
1736  self._options.customisation_file.insert(0, '.'.join([self.NANOGENDefaultCFF, custom]))
1737 
1738  def prepare_SKIM(self, sequence = "all"):
1739  ''' Enrich the schedule with skimming fragments'''
1740  skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
1741  sequence = sequence.split('.')[-1]
1742 
1743  skimlist=sequence.split('+')
1744 
1745  from Configuration.Skimming.autoSkim import autoSkim
1746  self.expandMapping(skimlist,autoSkim)
1747 
1748  #print "dictionnary for skims:",skimConfig.__dict__
1749  for skim in skimConfig.__dict__:
1750  skimstream = getattr(skimConfig,skim)
1751  if isinstance(skimstream,cms.Path):
1752  #black list the alca path so that they do not appear in the cfg
1753  self.blacklist_paths.append(skimstream)
1754  if (not isinstance(skimstream,cms.FilteredStream)):
1755  continue
1756  shortname = skim.replace('SKIMStream','')
1757  if (sequence=="all"):
1758  self.addExtraStream(skim,skimstream)
1759  elif (shortname in skimlist):
1760  self.addExtraStream(skim,skimstream)
1761  #add a DQM eventcontent for this guy
1762  if self._options.datatier=='DQM':
1763  self.process.load(self.EVTCONTDefaultCFF)
1764  skimstreamDQM = cms.FilteredStream(
1765  responsible = skimstream.responsible,
1766  name = skimstream.name+'DQM',
1767  paths = skimstream.paths,
1768  selectEvents = skimstream.selectEvents,
1769  content = self._options.datatier+'EventContent',
1770  dataTier = cms.untracked.string(self._options.datatier)
1771  )
1772  self.addExtraStream(skim+'DQM',skimstreamDQM)
1773  for i in range(skimlist.count(shortname)):
1774  skimlist.remove(shortname)
1775 
1776 
1777 
1778  if (skimlist.__len__()!=0 and sequence!="all"):
1779  print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1780  raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
1781 
1782  def prepare_USER(self, sequence = None):
1783  ''' Enrich the schedule with a user defined sequence '''
1784  self.loadDefaultOrSpecifiedCFF(sequence,self.USERDefaultCFF)
1785  self.scheduleSequence(sequence.split('.')[-1],'user_step')
1786  return
1787 
    def prepare_POSTRECO(self, sequence = None):
        """ Enrich the schedule with the postreco step """
        # NOTE(review): upstream versions load the POSTRECO default cff first;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequence('postreco_generator','postreco_step')
        return
1793 
1794 
1795  def prepare_VALIDATION(self, sequence = 'validation'):
1796  print(sequence,"in preparing validation")
1798  from Validation.Configuration.autoValidation import autoValidation
1799  #in case VALIDATION:something:somethingelse -> something,somethingelse
1800  sequence=sequence.split('.')[-1]
1801  if sequence.find(',')!=-1:
1802  prevalSeqName=sequence.split(',')[0].split('+')
1803  valSeqName=sequence.split(',')[1].split('+')
1804  self.expandMapping(prevalSeqName,autoValidation,index=0)
1805  self.expandMapping(valSeqName,autoValidation,index=1)
1806  else:
1807  if '@' in sequence:
1808  prevalSeqName=sequence.split('+')
1809  valSeqName=sequence.split('+')
1810  self.expandMapping(prevalSeqName,autoValidation,index=0)
1811  self.expandMapping(valSeqName,autoValidation,index=1)
1812  else:
1813  postfix=''
1814  if sequence:
1815  postfix='_'+sequence
1816  prevalSeqName=['prevalidation'+postfix]
1817  valSeqName=['validation'+postfix]
1818  if not hasattr(self.process,valSeqName[0]):
1819  prevalSeqName=['']
1820  valSeqName=[sequence]
1821 
1822  def NFI(index):
1823 
1824  if index==0:
1825  return ''
1826  else:
1827  return '%s'%index
1828 
1829 
1830  #rename the HLT process in validation steps
1831  if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
1832  for s in valSeqName+prevalSeqName:
1833  if s:
1835  for (i,s) in enumerate(prevalSeqName):
1836  if s:
1837  setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1838  self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
1839 
1840  for (i,s) in enumerate(valSeqName):
1841  setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1842  self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
1843 
1844  #needed in case the miniAODValidation sequence is run starting from AODSIM
1845  if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
1846  return
1847 
1848  if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
1849  if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
1850  self._options.restoreRNDSeeds=True
1851 
1852  if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
1853  self.executeAndRemember("process.mix.playback = True")
1854  self.executeAndRemember("process.mix.digitizers = cms.PSet()")
1855  self.executeAndRemember("for a in process.aliases: delattr(process, a)")
1856  self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1857 
1858  if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
1859  #will get in the schedule, smoothly
1860  for (i,s) in enumerate(valSeqName):
1861  getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1862 
1863  return
1864 
1865 
1867  """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1868  It will climb down within PSets, VPSets and VInputTags to find its target"""
    def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
        # process name to search for / its replacement, plus verbosity flag
        # and a tuple of parameter names that must never be modified
        self._paramReplace = paramReplace
        self._paramSearch = paramSearch
        self._verbose = verbose
        self._whitelist = whitelist
    def doIt(self,pset,base):
        """Recursively walk a parameterizable object and replace every
        occurrence of the searched process name with the replacement.

        `base` is the dotted path of `pset`, used only for verbose printout.
        """
        if isinstance(pset, cms._Parameterizable):
            for name in pset.parameters_().keys():
                # skip whitelisted parameters
                if name in self._whitelist:
                    continue
                # if I use pset.parameters_().items() I get copies of the parameter values
                # so I can't modify the nested pset
                value = getattr(pset,name)
                type = value.pythonTypeName()  # NOTE: shadows the builtin 'type'
                if type in ('cms.PSet', 'cms.untracked.PSet'):
                    # recurse into nested parameter sets
                    self.doIt(value,base+"."+name)
                elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
                    for (i,ps) in enumerate(value): self.doIt(ps, "%s.%s[%d]"%(base,name,i) )
                elif type in ('cms.string', 'cms.untracked.string'):
                    # plain strings match only when equal to the searched name
                    if value.value() == self._paramSearch:
                        if self._verbose: print("set string process name %s.%s %s ==> %s"% (base, name, value, self._paramReplace))
                        setattr(pset, name,self._paramReplace)
                elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
                    for (i,n) in enumerate(value):
                        if not isinstance(n, cms.InputTag):
                            n=cms.InputTag(n)
                        if n.processName == self._paramSearch:
                            # VInputTag can be declared as a list of strings, so ensure that n is formatted correctly
                            if self._verbose:print("set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self._paramReplace))
                            setattr(n,"processName",self._paramReplace)
                            value[i]=n
                elif type in ('cms.vstring', 'cms.untracked.vstring'):
                    for (i,n) in enumerate(value):
                        if n==self._paramSearch:
                            getattr(pset,name)[i]=self._paramReplace
                elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
                    # single tag: only the processName field is replaced
                    if value.processName == self._paramSearch:
                        if self._verbose: print("set process name %s.%s %s ==> %s " % (base, name, value, self._paramReplace))
                        setattr(getattr(pset, name),"processName",self._paramReplace)
1910 
1911  def enter(self,visitee):
1912  label = ''
1913  try:
1914  label = visitee.label()
1915  except AttributeError:
1916  label = '<Module not in a Process>'
1917  except:
1918  label = 'other execption'
1919  self.doIt(visitee, label)
1920 
1921  def leave(self,visitee):
1922  pass
1923 
1924  #visit a sequence to repalce all input tags
1925  def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
1926  print("Replacing all InputTag %s => %s"%(oldT,newT))
1927  from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
1928  massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
1929  loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
1930  if not loadMe in self.additionalCommands:
1931  self.additionalCommands.append(loadMe)
1932  self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1933 
    #change the process name used to address HLT results in any sequence
    def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
        """Point every module in `sequence` at the HLT results of the process
        given via --hltProcess (or of the current process).

        NOTE(review): the `proc` argument is always overwritten below, so the
        parameter is effectively unused -- kept for interface compatibility.
        """
        if self._options.hltProcess:
            proc=self._options.hltProcess
        else:
            proc=self.process.name_()
        # nothing to do when the target name is already the default
        if proc==HLTprocess: return
        # look up all module in dqm sequence
        print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
        getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
        # replay the same replacement in the dumped configuration
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1947 
1948 
1949  def expandMapping(self,seqList,mapping,index=None):
1950  maxLevel=30
1951  level=0
1952  while '@' in repr(seqList) and level<maxLevel:
1953  level+=1
1954  for specifiedCommand in seqList:
1955  if specifiedCommand.startswith('@'):
1956  location=specifiedCommand[1:]
1957  if not location in mapping:
1958  raise Exception("Impossible to map "+location+" from "+repr(mapping))
1959  mappedTo=mapping[location]
1960  if index!=None:
1961  mappedTo=mappedTo[index]
1962  seqList.remove(specifiedCommand)
1963  seqList.extend(mappedTo.split('+'))
1964  break;
1965  if level==maxLevel:
1966  raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
1967 
    def prepare_DQM(self, sequence = 'DQMOffline'):
        # this one needs replacement

        # any 'DQM' job should use DQMStore in non-legacy mode (but not HARVESTING)
        self.loadAndRemember("DQMServices/Core/DQMStoreNonLegacy_cff")
        # NOTE(review): upstream versions also load the DQMOffline default cff
        # here; that line appears to be missing from this copy -- verify.
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # '@'-aliases: index 0 gives the DQM sequences, index 1 the
        # post-processing (on-PAT) sequences
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            # first path keeps the plain name, the others get a numeric suffix
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # point the DQM modules at the right HLT results process
                self.renameHLTprocessInSequence(sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)


        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            #Fix needed to avoid duplication of sequences not defined in autoDQM or without a PostDQM
            if (sequenceList[i]==postSequenceList[i]):
                continue
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
2010 
    def prepare_HARVESTING(self, sequence = None):
        """ Enrich the process with harvesting step """
        # the saver flavour (AtRunEnd, AtJobEnd, ...) comes from --harvesting
        self.DQMSaverCFF='Configuration/StandardSequences/DQMSaver'+self._options.harvesting+'_cff'

        harvestingConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.HARVESTINGDefaultCFF)
        sequence = sequence.split('.')[-1]

        # decide which HARVESTING paths to use
        harvestingList = sequence.split("+")
        from DQMOffline.Configuration.autoDQM import autoDQM
        from Validation.Configuration.autoValidation import autoValidation
        import copy
        # '@'-aliases may come from either mapping; index -1 selects the
        # harvesting entry of each mapping tuple
        combined_mapping = copy.deepcopy( autoDQM )
        combined_mapping.update( autoValidation )
        self.expandMapping(harvestingList,combined_mapping,index=-1)

        if len(set(harvestingList))!=len(harvestingList):
            harvestingList=list(set(harvestingList))
            print("Duplicate entries for HARVESTING, using",harvestingList)

        for name in harvestingList:
            if not name in harvestingConfig.__dict__:
                print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
                # trigger hard error, like for other sequence types
                getattr(self.process, name)
                continue
            harvestingstream = getattr(harvestingConfig,name)
            if isinstance(harvestingstream,cms.Path):
                # ready-made paths go on the schedule but not in the dumped cfg
                self.schedule.append(harvestingstream)
                self.blacklist_paths.append(harvestingstream)
            if isinstance(harvestingstream,cms.Sequence):
                # bare sequences are wrapped into a '<name>_step' path first
                setattr(self.process,name+"_step",cms.Path(harvestingstream))
                self.schedule.append(getattr(self.process,name+"_step"))

        self.scheduleSequence('DQMSaver','dqmsave_step')
        return
2048 
    def prepare_ALCAHARVEST(self, sequence = None):
        """ Enrich the process with AlCaHarvesting step """
        harvestingConfig = self.loadAndRemember(self.ALCAHARVESTDefaultCFF)
        sequence=sequence.split(".")[-1]

        # decide which AlcaHARVESTING paths to use
        harvestingList = sequence.split("+")



        from Configuration.AlCa.autoPCL import autoPCL
        self.expandMapping(harvestingList,autoPCL)

        for name in harvestingConfig.__dict__:
            harvestingstream = getattr(harvestingConfig,name)
            if name in harvestingList and isinstance(harvestingstream,cms.Path):
                self.schedule.append(harvestingstream)
                # wire the harvesting output into the DB writer: VPSet payloads
                # are extended, single PSets are appended
                if isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_dbOutput"), cms.VPSet) and \
                   isinstance(getattr(harvestingConfig,"ALCAHARVEST" + name + "_metadata"), cms.VPSet):
                    self.executeAndRemember("process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name + "_metadata)")
                else:
                    self.executeAndRemember("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
                    self.executeAndRemember("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")
                harvestingList.remove(name)
        # append the common part at the end of the sequence
        lastStep = getattr(harvestingConfig,"ALCAHARVESTDQMSaveAndMetadataWriter")
        self.schedule.append(lastStep)

        # anything left over was requested but not defined in the config
        if len(harvestingList) != 0 and 'dummyHarvesting' not in harvestingList :
            print("The following harvesting could not be found : ", harvestingList)
            raise Exception("The following harvesting could not be found : "+str(harvestingList))
2082 
2083 
    def prepare_ENDJOB(self, sequence = 'endOfProcess'):
        """Enrich the schedule with the end-of-job step, scheduled last."""
        # NOTE(review): upstream versions load the ENDJOB default cff first;
        # that line appears to be missing from this copy -- verify.
        self.scheduleSequenceAtEnd(sequence.split('.')[-1],'endjob_step')
        return
2088 
    def finalizeFastSimHLT(self):
        """Attach the FastSim (Famos) reconstruction path and schedule it."""
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)
2092 
2093 
2094  def build_production_info(self, evt_type, evtnumber):
2095  """ Add useful info for the production. """
2096  self.process.configurationMetadata=cms.untracked.PSet\
2097  (version=cms.untracked.string("$Revision: 1.19 $"),
2098  name=cms.untracked.string("Applications"),
2099  annotation=cms.untracked.string(evt_type+ " nevts:"+str(evtnumber))
2100  )
2101 
2102  self.addedObjects.append(("Production Info","configurationMetadata"))
2103 
2104 
    def create_process(self):
        """Create self.process (applying eras and process modifiers) and start
        building the python configuration text in self.pythonCfgCode."""
        self.pythonCfgCode = "# Auto generated configuration file\n"
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        # now set up the modifies
        modifiers=[]         # Modifier objects passed to cms.Process
        modifierStrings=[]   # their names, for the dumped cfg
        modifierImports=[]   # import statements for the dumped cfg

        if hasattr(self._options,"era") and self._options.era :
            # Multiple eras can be specified in a comma seperated list
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(",") :
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras,requestedEra))


        if hasattr(self._options,"procModifiers") and self._options.procModifiers:
            import importlib
            thingsImported=[]
            # each entry of procModifiers may itself be a comma-separated list
            for c in self._options.procModifiers:
                thingsImported.extend(c.split(","))
            for pm in thingsImported:
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))

        self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'" # Start of the line, finished after the loop


        if len(modifierStrings)>0:
            self.pythonCfgCode+= ','+','.join(modifierStrings)
        self.pythonCfgCode+=')\n\n'

        #yes, the cfg code gets out of sync here if a process is passed in. That could be fixed in the future
        #assuming there is some way for the fwk to get the list of modifiers (and their stringified name)
        if self.process == None:
            if len(modifiers)>0:
                self.process = cms.Process(self._options.name,*modifiers)
            else:
                self.process = cms.Process(self._options.name)
2150 
2151 
2152 
2153 
2154  def prepare(self, doChecking = False):
2155  """ Prepare the configuration string and add missing pieces."""
2156 
2157  self.loadAndRemember(self.EVTCONTDefaultCFF) #load the event contents regardless
2158  self.addMaxEvents()
2159  if self.with_input:
2160  self.addSource()
2161  self.addStandardSequences()
2162 
2163  self.completeInputCommand()
2164  self.addConditions()
2165 
2166 
2167  outputModuleCfgCode=""
2168  if not 'HARVESTING' in self.stepMap.keys() and not 'ALCAHARVEST' in self.stepMap.keys() and not 'ALCAOUTPUT' in self.stepMap.keys() and self.with_output:
2169  outputModuleCfgCode=self.addOutput()
2170 
2171  self.addCommon()
2172 
2173  self.pythonCfgCode += "# import of standard configurations\n"
2174  for module in self.imports:
2175  self.pythonCfgCode += ("process.load('"+module+"')\n")
2176 
2177  # production info
2178  if not hasattr(self.process,"configurationMetadata"):
2179  self.build_production_info(self._options.evt_type, self._options.number)
2180  else:
2181  #the PSet was added via a load
2182  self.addedObjects.append(("Production Info","configurationMetadata"))
2183 
2184  self.pythonCfgCode +="\n"
2185  for comment,object in self.addedObjects:
2186  if comment!="":
2187  self.pythonCfgCode += "\n# "+comment+"\n"
2188  self.pythonCfgCode += dumpPython(self.process,object)
2189 
2190  # dump the output definition
2191  self.pythonCfgCode += "\n# Output definition\n"
2192  self.pythonCfgCode += outputModuleCfgCode
2193 
2194  # dump all additional outputs (e.g. alca or skim streams)
2195  self.pythonCfgCode += "\n# Additional output definition\n"
2196  #I do not understand why the keys are not normally ordered.
2197  nl=sorted(self.additionalOutputs.keys())
2198  for name in nl:
2199  output = self.additionalOutputs[name]
2200  self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
2201  tmpOut = cms.EndPath(output)
2202  setattr(self.process,name+'OutPath',tmpOut)
2203  self.schedule.append(tmpOut)
2204 
2205  # dump all additional commands
2206  self.pythonCfgCode += "\n# Other statements\n"
2207  for command in self.additionalCommands:
2208  self.pythonCfgCode += command + "\n"
2209 
2210  #comma separated list of objects that deserve to be inlined in the configuration (typically from a modified config deep down)
2211  for object in self._options.inlineObjets.split(','):
2212  if not object:
2213  continue
2214  if not hasattr(self.process,object):
2215  print('cannot inline -'+object+'- : not known')
2216  else:
2217  self.pythonCfgCode +='\n'
2218  self.pythonCfgCode +=dumpPython(self.process,object)
2219 
2220  if self._options.pileup=='HiMixEmbGEN':
2221  self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.int32(1)\n"
2222 
2223  # dump all paths
2224  self.pythonCfgCode += "\n# Path and EndPath definitions\n"
2225  for path in self.process.paths:
2226  if getattr(self.process,path) not in self.blacklist_paths:
2227  self.pythonCfgCode += dumpPython(self.process,path)
2228 
2229  for endpath in self.process.endpaths:
2230  if getattr(self.process,endpath) not in self.blacklist_paths:
2231  self.pythonCfgCode += dumpPython(self.process,endpath)
2232 
2233  # dump the schedule
2234  self.pythonCfgCode += "\n# Schedule definition\n"
2235 
2236  # handling of the schedule
2237  pathNames = ['process.'+p.label_() for p in self.schedule]
2238  if self.process.schedule == None:
2239  self.process.schedule = cms.Schedule()
2240  for item in self.schedule:
2241  self.process.schedule.append(item)
2242  result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
2243  else:
2244  if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
2245  raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2246 
2247  for index, item in enumerate(self.schedule):
2248  if index < self.scheduleIndexOfFirstHLTPath:
2249  self.process.schedule.insert(index, item)
2250  else:
2251  self.process.schedule.append(item)
2252 
2253  result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
2254  for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
2255  result += 'process.schedule.insert('+str(index)+', '+item+')\n'
2256  if self.scheduleIndexOfFirstHLTPath < len(pathNames):
2257  result += 'process.schedule.extend(['+','.join(pathNames[self.scheduleIndexOfFirstHLTPath:])+'])\n'
2258 
2259  self.pythonCfgCode += result
2260 
2261  for labelToAssociate in self.labelsToAssociate:
2262  self.process.schedule.associate(getattr(self.process, labelToAssociate))
2263  self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
2264 
2265  from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
2267  self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
2268  self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"
2269 
2270  overrideThreads = (self._options.nThreads != "1")
2271  overrideConcurrentLumis = (self._options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2272  overrideConcurrentIOVs = (self._options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2273 
2274  if overrideThreads or overrideConcurrentLumis or overrideConcurrentIOVs:
2275  self.pythonCfgCode +="\n"
2276  self.pythonCfgCode +="#Setup FWK for multithreaded\n"
2277  if overrideThreads:
2278  self.pythonCfgCode +="process.options.numberOfThreads = "+self._options.nThreads+"\n"
2279  self.pythonCfgCode +="process.options.numberOfStreams = "+self._options.nStreams+"\n"
2280  self.process.options.numberOfThreads = int(self._options.nThreads)
2281  self.process.options.numberOfStreams = int(self._options.nStreams)
2282  if overrideConcurrentLumis:
2283  self.pythonCfgCode +="process.options.numberOfConcurrentLuminosityBlocks = "+self._options.nConcurrentLumis+"\n"
2284  self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
2285  if overrideConcurrentIOVs:
2286  self.pythonCfgCode +="process.options.eventSetup.numberOfConcurrentIOVs = "+self._options.nConcurrentIOVs+"\n"
2287  self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)
2288 
2289  if self._options.accelerators is not None:
2290  accelerators = self._options.accelerators.split(',')
2291  self.pythonCfgCode += "\n"
2292  self.pythonCfgCode += "# Enable only these accelerator backends\n"
2293  self.pythonCfgCode += "process.load('Configuration.StandardSequences.Accelerators_cff')\n"
2294  self.pythonCfgCode += "process.options.accelerators = ['" + "', '".join(accelerators) + "']\n"
2295  self.process.load('Configuration.StandardSequences.Accelerators_cff')
2296  self.process.options.accelerators = accelerators
2297 
2298  #repacked version
2299  if self._options.isRepacked:
2300  self.pythonCfgCode +="\n"
2301  self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
2302  self.pythonCfgCode +="MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
2303  MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
2304 
2305  # special treatment in case of production filter sequence 2/2
2306  if self.productionFilterSequence and not (self._options.pileup=='HiMixEmbGEN'):
2307  self.pythonCfgCode +='# filter all path with the production filter sequence\n'
2308  self.pythonCfgCode +='for path in process.paths:\n'
2309  if len(self.conditionalPaths):
2310  self.pythonCfgCode +='\tif not path in %s: continue\n'%str(self.conditionalPaths)
2311  if len(self.excludedPaths):
2312  self.pythonCfgCode +='\tif path in %s: continue\n'%str(self.excludedPaths)
2313  self.pythonCfgCode +='\tgetattr(process,path).insert(0, process.%s)\n'%(self.productionFilterSequence,)
2314  pfs = getattr(self.process,self.productionFilterSequence)
2315  for path in self.process.paths:
2316  if not path in self.conditionalPaths: continue
2317  if path in self.excludedPaths: continue
2318  getattr(self.process,path).insert(0, pfs)
2319 
2320 
2321  # dump customise fragment
2322  self.pythonCfgCode += self.addCustomise()
2323 
2324  if self._options.runUnscheduled:
2325  print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2326  # Keep the "unscheduled customise functions" separate for now,
2327  # there are customize functions given by users (in our unit
2328  # tests) that need to be run before the "unscheduled customise
2329  # functions"
2330  self.pythonCfgCode += self.addCustomise(1)
2331 
2332  self.pythonCfgCode += self.addCustomiseCmdLine()
2333 
2334  if hasattr(self.process,"logErrorHarvester"):
2335  #configure logErrorHarvester to wait for same EDProducers to finish as the OutputModules
2336  self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
2337  self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
2338  self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
2339  from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
2341 
2342  # Temporary hack to put the early delete customization after
2343  # everything else
2344  #
2345  # FIXME: remove when no longer needed
2346  self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
2347  self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
2348  self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
2349  self.pythonCfgCode += "# End adding early deletion\n"
2350  from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
2351  self.process = customiseEarlyDelete(self.process)
2352 
2353  imports = cms.specialImportRegistry.getSpecialImports()
2354  if len(imports) > 0:
2355  #need to inject this at the top
2356  index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
2357  #now find the end of line
2358  index = self.pythonCfgCode.find("\n",index)
2359  self.pythonCfgCode = self.pythonCfgCode[:index]+ "\n" + "\n".join(imports)+"\n" +self.pythonCfgCode[index:]
2360 
2361 
2362  # make the .io file
2363 
2364  if self._options.io:
2365  #io=open(self._options.python_filename.replace('.py','.io'),'w')
2366  if not self._options.io.endswith('.io'): self._option.io+='.io'
2367  io=open(self._options.io,'w')
2368  ioJson={}
2369  if hasattr(self.process.source,"fileNames"):
2370  if len(self.process.source.fileNames.value()):
2371  ioJson['primary']=self.process.source.fileNames.value()
2372  if hasattr(self.process.source,"secondaryFileNames"):
2373  if len(self.process.source.secondaryFileNames.value()):
2374  ioJson['secondary']=self.process.source.secondaryFileNames.value()
2375  if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
2376  ioJson['pileup']=self._options.pileup_input[4:]
2377  for (o,om) in self.process.outputModules_().items():
2378  ioJson[o]=om.fileName.value()
2379  ioJson['GT']=self.process.GlobalTag.globaltag.value()
2380  if self.productionFilterSequence:
2381  ioJson['filter']=self.productionFilterSequence
2382  import json
2383  io.write(json.dumps(ioJson))
2384  return
2385 
2386 
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
Definition: ECalSD.cc:37
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
Definition: MassReplace.py:79
def prepare_L1REPACK(self, sequence=None)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
def filesFromDASQuery(query, option="", s=None)
assert(be >=bs)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def ProcessName(process)
Definition: CustomConfigs.py:8
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:47
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
Definition: HCMethods.h:50
def prepare_DIGI(self, sequence=None)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
Definition: RemoteFile.cc:19
def prepare_ALCAHARVEST(self, sequence=None)
def defineMixing(dict)
Definition: Mixing.py:193
def dumpPython(process, name)
def miniAOD_customizeOutput(out)
def encode(args, files)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
Definition: helpers.py:24
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_NANOGEN(self, sequence="nanoAOD")
def prepare_L1(self, sequence=None)
def lumi_to_run(runs, events_in_sample, events_per_job)
Definition: LumiToRun.py:1
def scheduleSequenceAtEnd(self, seq, prefix)
#define str(s)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def executeAndRemember(self, command)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule