# cmsDriver ConfigBuilder (extracted fragment) — CVS keyword metadata kept as-is.
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
# Helper to rewrite InputTags across a process; re-exported under a local alias.
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
# Popen/PIPE are used below to shell out to dasgoclient for DAS file queries.
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
from FWCore.ParameterSet.OrderedSet import OrderedSet
# ---------------------------------------------------------------------------
# Default values for every cmsDriver option.  `defaultOptions` itself is
# created earlier in the file (not visible in this extract); each attribute
# below is the fallback used when the corresponding command-line option is
# not given.
# ---------------------------------------------------------------------------

# Data/MC mixing and sample-type flags.
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
# Processing steps ("GEN,SIM,..."); empty means none selected yet.
defaultOptions.step=''
# Pileup configuration: scenario name, input dataset and DAS query option.
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# Geometry and magnetic-field selection.
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
# Conditions (GlobalTag); None forces the user to supply one where required.
defaultOptions.conditions = None
# Allowed --scenario values (validated elsewhere).
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
# Number of events to process (-1 = all) and to write out.
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# Input specification: plain files, DAS query, secondary files.
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
# Customisation hooks: python files/functions and inline commands.
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
# Particle-data table choice (validated against particleTableList).
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
# I/O locations and formats.
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
# Output definition: event content / data tier, inlining behaviour.
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjects =''
defaultOptions.hideGen=False
# Beamspot smearing keys (import kept here, in original statement order,
# because the default depends on the scenario chosen later).
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition =''
# Input/output branch commands (keep/drop statements).
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
# Profiling / memory-inspection services.
defaultOptions.prefix = None
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.maxmem_profile = None
defaultOptions.isRepacked = False
# Random-number-seed restoration from a previous job.
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
# Run/weight assignment for MC (float and integer-weight variants).
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# Framework concurrency settings (0 = framework default).
defaultOptions.nThreads = 1
defaultOptions.nStreams = 0
defaultOptions.nConcurrentLumis = 0
defaultOptions.nConcurrentIOVs = 0
95 defaultOptions.accelerators =
None 99 theObject = getattr(process,name)
100 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
101 return "process."+name+
" = " + theObject.dumpPython()
102 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
103 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 105 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 108 import FWCore.ParameterSet.Config
as cms
111 for line
in open(fileName,
'r'): 112 if line.count(
".root")>=2:
114 entries=line.replace(
"\n",
"").
split()
115 prim.append(entries[0])
116 sec.append(entries[1])
117 elif (line.find(
".root")!=-1):
118 entry=line.replace(
"\n",
"")
122 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
124 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
126 if not hasattr(s,
"fileNames"):
127 s.fileNames=cms.untracked.vstring(prim)
129 s.fileNames.extend(prim)
131 if not hasattr(s,
"secondaryFileNames"):
132 s.secondaryFileNames=cms.untracked.vstring(sec)
134 s.secondaryFileNames.extend(sec)
135 print(
"found files: ",prim)
137 raise Exception(
"There are not files in input from the file list")
139 print(
"found parent files:",sec)
144 import FWCore.ParameterSet.Config
as cms
147 print(
"the query is",query)
150 while eC!=0
and count<3:
152 print(
'Sleeping, then retrying DAS')
154 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
156 tupleP = os.waitpid(p.pid, 0)
160 print(
"DAS succeeded after",count,
"attempts",eC)
162 print(
"DAS failed 3 times- I give up")
163 for line
in pipe.split(
'\n'):
164 if line.count(
".root")>=2:
166 entries=line.replace(
"\n",
"").
split()
167 prim.append(entries[0])
168 sec.append(entries[1])
169 elif (line.find(
".root")!=-1):
170 entry=line.replace(
"\n",
"")
173 prim = sorted(list(set(prim)))
174 sec = sorted(list(set(sec)))
176 if not hasattr(s,
"fileNames"):
177 s.fileNames=cms.untracked.vstring(prim)
179 s.fileNames.extend(prim)
181 if not hasattr(s,
"secondaryFileNames"):
182 s.secondaryFileNames=cms.untracked.vstring(sec)
184 s.secondaryFileNames.extend(sec)
185 print(
"found files: ",prim)
187 print(
"found parent files:",sec)
def anyOf(listOfKeys,dict,opt=None):
    """Return the value of the first key from listOfKeys present in dict.

    The matched entry is *removed* from dict, so that after all expected
    keys have been looked up the caller can flag whatever is left as
    "unused keys from --output options".  If none of the keys is present,
    return opt when one was supplied, otherwise raise.

    NOTE: the second parameter keeps its historical name 'dict' (shadowing
    the builtin) to preserve the public signature used throughout this file.
    """
    for key in listOfKeys:
        if key in dict:
            # pop() both retrieves the value and consumes the entry.
            return dict.pop(key)
    # No key matched: fall back to the provided default, if any.
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
202 """The main building routines """ 204 def __init__(self, options, process = None, with_output = False, with_input = False ):
205 """options taken from old cmsDriver and optparse """ 207 options.outfile_name = options.dirout+options.fileout
211 if self.
_options.isData
and options.isMC:
212 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
218 if (hasattr(self.
_options,
"outputDefinition")
and \
219 self.
_options.outputDefinition !=
'' and \
220 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
221 (hasattr(self.
_options,
"datatier")
and \
224 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
230 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
233 for step
in self.
_options.step.split(
","):
234 if step==
'':
continue 235 stepParts = step.split(
":")
236 stepName = stepParts[0]
237 if stepName
not in stepList
and not stepName.startswith(
're'):
238 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
239 if len(stepParts)==1:
241 elif len(stepParts)==2:
243 elif len(stepParts)==3:
244 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
246 raise ValueError(f
"Step definition {step} invalid")
254 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
282 Function to add the igprof profile service so that you can dump in the middle 285 profileOpts = self.
_options.profile.split(
':')
287 profilerInterval = 100
288 profilerFormat =
None 289 profilerJobFormat =
None 295 startEvent = profileOpts.pop(0)
296 if not startEvent.isdigit():
297 raise Exception(
"%s is not a number" % startEvent)
298 profilerStart =
int(startEvent)
300 eventInterval = profileOpts.pop(0)
301 if not eventInterval.isdigit():
302 raise Exception(
"%s is not a number" % eventInterval)
303 profilerInterval =
int(eventInterval)
305 profilerFormat = profileOpts.pop(0)
308 if not profilerFormat:
309 profilerFormat =
"%s___%s___%%I.gz" % (
310 self.
_options.evt_type.replace(
"_cfi",
""),
316 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
317 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
318 elif not profilerJobFormat:
319 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 321 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
326 Function to add the jemalloc heap profile service so that you can dump in the middle 331 profilerInterval = 100
332 profilerFormat =
"jeprof_%s.heap" 333 profilerJobFormat =
None 336 if not profilerJobFormat
and profilerFormat.endswith(
".heap"):
337 profilerJobFormat = profilerFormat.replace(
".heap",
"_EndOfJob.heap")
338 elif not profilerJobFormat:
339 profilerJobFormat = profilerFormat +
"_EndOfJob.heap" 341 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
344 includeFile = includeFile.replace(
'/',
'.')
346 return sys.modules[includeFile]
349 """helper routine to load am memorize imports""" 352 includeFile = includeFile.replace(
'/',
'.')
355 return sys.modules[includeFile]
358 """helper routine to remember replace statements""" 360 if not command.strip().startswith(
"#"):
363 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
368 self.
process.options.Rethrow = [
'ProductNotFound']
369 self.
process.options.fileMode =
'FULLMERGE' 374 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
375 stats = cms.untracked.bool(
True),
376 enable = cms.untracked.bool(
True),
377 cacheHint = cms.untracked.string(
"lazy-download"),
378 readHint = cms.untracked.string(
"read-ahead-buffered")
387 self.
process.IgProfService = cms.Service(
"IgProfService",
388 reportFirstEvent = cms.untracked.int32(start),
389 reportEventInterval = cms.untracked.int32(interval),
390 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
391 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
396 self.
process.JeProfService = cms.Service(
"JeProfService",
397 reportFirstEvent = cms.untracked.int32(start),
398 reportEventInterval = cms.untracked.int32(interval),
399 reportToFileAtPostEvent = cms.untracked.string(
"%s"%(eventFormat)),
400 reportToFileAtPostEndJob = cms.untracked.string(
"%s"%(jobFormat)))
404 """Here we decide how many evts will be processed""" 411 """Here the source is built. Priority: file, generator""" 414 def filesFromOption(self):
415 for entry
in self.
_options.filein.split(
','):
417 if entry.startswith(
"filelist:"):
419 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
424 if not hasattr(self.
process.source,
"secondaryFileNames"):
425 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
426 for entry
in self.
_options.secondfilein.split(
','):
428 if entry.startswith(
"filelist:"):
430 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
433 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
437 self.
process.source=cms.Source(
"PoolSource",
438 fileNames = cms.untracked.vstring(),
439 secondaryFileNames= cms.untracked.vstring())
440 filesFromOption(self)
441 elif self.
_options.filetype ==
"DAT":
442 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
443 filesFromOption(self)
444 elif self.
_options.filetype ==
"LHE":
445 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
446 if self.
_options.filein.startswith(
"lhe:"):
448 args=self.
_options.filein.split(
':')
450 print(
'LHE input from article ',article)
451 location=
'/store/lhe/' 453 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
454 for line
in textOfFiles:
455 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
456 self.
process.source.fileNames.append(location+article+
'/'+fileName)
459 print(
'Issue to load LHE files, please check and try again.')
462 if len(self.
process.source.fileNames)==0:
463 print(
'Issue with empty filename, but can pass line check')
466 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
468 filesFromOption(self)
470 elif self.
_options.filetype ==
"DQM":
471 self.
process.source=cms.Source(
"DQMRootSource",
472 fileNames = cms.untracked.vstring())
473 filesFromOption(self)
475 elif self.
_options.filetype ==
"DQMDAQ":
477 self.
process.source=cms.Source(
"DQMStreamerReader")
481 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
484 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
488 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
493 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 495 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 498 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
499 for command
in self.
_options.inputCommands.split(
','):
501 command = command.strip()
502 if command==
'':
continue 503 self.
process.source.inputCommands.append(command)
504 if not self.
_options.dropDescendant:
505 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
508 import FWCore.PythonUtilities.LumiList
as LumiList
512 if self.
process.source
is None:
513 self.
process.source=cms.Source(
"EmptySource")
519 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
520 if self.
_options.runsAndWeightsForMC:
523 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
524 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
525 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
526 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
531 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
538 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
540 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
541 if self.
_options.runsAndWeightsForMCIntegerWeights:
544 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
545 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
546 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
547 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
553 raise Exception(
"--relval option required when using --runsAndWeightsInt")
555 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
556 total_events, events_per_job = self.
_options.relval.split(
',')
558 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
563 """ Add output module to the process """ 567 print(
"--datatier & --eventcontent options ignored")
570 outList = eval(self.
_options.outputDefinition)
571 for (id,outDefDict)
in enumerate(outList):
572 outDefDictStr=outDefDict.__str__()
573 if not isinstance(outDefDict,dict):
574 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
576 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
579 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
580 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
581 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
582 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
583 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
585 if not theModuleLabel:
586 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
587 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
588 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 590 for name
in tryNames:
591 if not hasattr(self.
process,name):
594 if not theModuleLabel:
595 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
597 defaultFileName=self.
_options.outfile_name
599 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
601 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
602 if not theFileName.endswith(
'.root'):
606 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
607 if theStreamType==
'DQMIO': theStreamType=
'DQM' 608 if theStreamType==
'ALL':
609 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
611 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
615 if theStreamType==
'ALCARECO' and not theFilterName:
616 theFilterName=
'StreamALCACombined' 619 CppType=
'PoolOutputModule' 621 CppType=
'TimeoutPoolOutputModule' 622 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 623 output = cms.OutputModule(CppType,
624 theEventContent.clone(),
625 fileName = cms.untracked.string(theFileName),
626 dataset = cms.untracked.PSet(
627 dataTier = cms.untracked.string(theTier),
628 filterName = cms.untracked.string(theFilterName))
630 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
631 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
632 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
633 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
635 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
638 if not hasattr(output,
'SelectEvents'):
639 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
641 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
644 if hasattr(self.
process,theModuleLabel):
645 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
647 setattr(self.
process,theModuleLabel,output)
648 outputModule=getattr(self.
process,theModuleLabel)
649 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
650 path=getattr(self.
process,theModuleLabel+
'_step')
653 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
654 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
655 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
656 if theExtraOutputCommands:
657 if not isinstance(theExtraOutputCommands,list):
658 raise Exception(
"extra ouput command in --option must be a list of strings")
659 if hasattr(self.
process,theStreamType+
"EventContent"):
660 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
662 outputModule.outputCommands.extend(theExtraOutputCommands)
664 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
669 streamTypes=self.
_options.eventcontent.split(
',')
670 tiers=self.
_options.datatier.split(
',')
671 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
672 raise Exception(
"number of event content arguments does not match number of datatier arguments")
678 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
679 if streamType==
'':
continue 680 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 681 if streamType==
'DQMIO': streamType=
'DQM' 682 eventContent=streamType
684 if streamType ==
"NANOEDMAOD" :
685 eventContent =
"NANOAOD" 686 elif streamType ==
"NANOEDMAODSIM" :
687 eventContent =
"NANOAODSIM" 688 theEventContent = getattr(self.
process, eventContent+
"EventContent")
690 theFileName=self.
_options.outfile_name
691 theFilterName=self.
_options.filtername
693 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
694 theFilterName=self.
_options.filtername
695 CppType=
'PoolOutputModule' 697 CppType=
'TimeoutPoolOutputModule' 698 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 699 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 700 output = cms.OutputModule(CppType,
702 fileName = cms.untracked.string(theFileName),
703 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
704 filterName = cms.untracked.string(theFilterName)
707 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
708 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
709 if hasattr(self.
process,
"filtering_step"):
710 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
712 if streamType==
'ALCARECO':
713 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
715 if "MINIAOD" in streamType:
716 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
719 outputModuleName=streamType+
'output' 720 setattr(self.
process,outputModuleName,output)
721 outputModule=getattr(self.
process,outputModuleName)
722 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
723 path=getattr(self.
process,outputModuleName+
'_step')
726 if self.
_options.outputCommands
and streamType!=
'DQM':
727 for evct
in self.
_options.outputCommands.split(
','):
728 if not evct:
continue 729 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
731 if not self.
_options.inlineEventContent:
732 tmpstreamType=streamType
733 if "NANOEDM" in tmpstreamType :
734 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
735 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
737 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
739 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
745 Add selected standard sequences to the process 749 pileupSpec=self.
_options.pileup.split(
',')[0]
752 pileups_without_input=[defaultOptions.pileup,
"Cosmics",
"default",
"HiMixNoPU",
None]
753 if self.
_options.pileup
not in pileups_without_input
and self.
_options.pileup_input==
None:
754 message =
"Pileup scenerio requires input files. Please add an appropriate --pileup_input option" 758 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
759 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
760 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
764 if '.' in pileupSpec:
765 mixingDict={
'file':pileupSpec}
766 elif pileupSpec.startswith(
'file:'):
767 mixingDict={
'file':pileupSpec[5:]}
770 mixingDict=copy.copy(Mixing[pileupSpec])
771 if len(self.
_options.pileup.split(
','))>1:
772 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
775 if 'file:' in pileupSpec:
778 print(
"inlining mixing module configuration")
783 mixingDict.pop(
'file')
786 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
788 elif self.
_options.pileup_input.startswith(
"filelist:"):
791 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
793 for command
in specialization:
795 if len(mixingDict)!=0:
796 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
817 stepSpec = self.
stepMap[stepName]
818 print(
"Step:", stepName,
"Spec:",stepSpec)
819 if stepName.startswith(
're'):
821 if stepName[2:]
not in self.
_options.donotDropOnInput:
822 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
823 stepName=stepName[2:]
825 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
826 elif isinstance(stepSpec, list):
827 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
828 elif isinstance(stepSpec, tuple):
829 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
831 raise ValueError(
"Invalid step definition")
833 if self.
_options.restoreRNDSeeds!=
False:
835 if self.
_options.restoreRNDSeeds==
True:
836 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
838 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
841 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 843 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 849 def dropSecondDropStar(iec):
859 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
860 for evct
in self.
_options.inputEventContent.split(
','):
861 if evct==
'':
continue 862 theEventContent = getattr(self.
process, evct+
"EventContent")
863 if hasattr(theEventContent,
'outputCommands'):
864 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
865 if hasattr(theEventContent,
'inputCommands'):
866 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
868 dropSecondDropStar(self.
process.source.inputCommands)
870 if not self.
_options.dropDescendant:
871 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
877 """Add conditions to the process""" 878 if not self.
_options.conditions:
return 880 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
881 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
882 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
885 from Configuration.AlCa.GlobalTag
import GlobalTag
892 """Include the customise code """ 896 for c
in self.
_options.customisation_file:
897 custOpt.extend(c.split(
","))
899 for c
in self.
_options.customisation_file_unsch:
900 custOpt.extend(c.split(
","))
906 raise Exception(
"more than . in the specification:"+opt)
907 fileName=opt.split(
'.')[0]
908 if opt.count(
'.')==0: rest=
'customise' 910 rest=opt.split(
'.')[1]
911 if rest==
'py': rest=
'customise' 913 if fileName
in custMap:
914 custMap[fileName].extend(rest.split(
'+'))
916 custMap[fileName]=rest.split(
'+')
921 final_snippet=
'\n# customisation of the process.\n' 925 allFcn.extend(custMap[opt])
927 if allFcn.count(fcn)!=1:
928 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
932 packageName = f.replace(
".py",
"").
replace(
"/",
".")
933 __import__(packageName)
934 package = sys.modules[packageName]
937 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
939 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 941 for line
in file(customiseFile,
'r'): 942 if "import FWCore.ParameterSet.Config" in line:
944 final_snippet += line
946 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
947 for fcn
in custMap[f]:
948 print(
"customising the process with",fcn,
"from",f)
949 if not hasattr(package,fcn):
951 raise Exception(
"config "+f+
" has no function "+fcn)
955 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
956 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
959 final_snippet +=
'\n# End of customisation functions\n' 965 final_snippet=
'\n# Customisation from command line\n' 966 if self.
_options.customise_commands:
968 for com
in self.
_options.customise_commands.split(
'\\n'):
971 final_snippet +=
'\n'+com
982 if self.
_options.particleTable
not in defaultOptions.particleTableList:
983 print(
'Invalid particle table provided. Options are:')
984 print(defaultOptions.particleTable)
992 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 998 self.
L1P2GTDefaultCFF =
'Configuration/StandardSequences/SimPhase2L1GlobalTriggerEmulator_cff' 1025 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1064 self.
_options.beamspot=VtxSmearedDefaultKey
1069 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1070 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1073 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1082 if self.
_options.scenario==
'cosmics':
1084 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1085 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1086 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1096 if self.
_options.scenario==
'HeavyIons':
1098 self.
_options.beamspot=VtxSmearedHIDefaultKey
1103 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1105 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1108 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1120 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1124 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1128 if 'start' in self.
_options.conditions.lower():
1129 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1131 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1133 def inGeometryKeys(opt):
1134 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1135 if opt
in GeometryConf:
1136 return GeometryConf[opt]
1140 geoms=self.
_options.geometry.split(
',')
1141 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1144 if '/' in geoms[1]
or '_cff' in geoms[1]:
1147 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1149 if (geoms[0].startswith(
'DB:')):
1150 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1154 if '/' in geoms[0]
or '_cff' in geoms[0]:
1157 simGeometry=geoms[0]
1159 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1161 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1164 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1165 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1168 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1173 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1174 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1181 if self.
_options.pileup==
'default':
1182 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1183 self.
_options.pileup=MixingDefaultKey
1196 output = cms.OutputModule(
"PoolOutputModule")
1197 if stream.selectEvents.parameters_().__len__()!=0:
1198 output.SelectEvents = stream.selectEvents
1200 output.SelectEvents = cms.untracked.PSet()
1201 output.SelectEvents.SelectEvents=cms.vstring()
1202 if isinstance(stream.paths,tuple):
1203 for path
in stream.paths:
1204 output.SelectEvents.SelectEvents.append(path.label())
1206 output.SelectEvents.SelectEvents.append(stream.paths.label())
1210 if isinstance(stream.content,str):
1211 evtPset=getattr(self.process,stream.content)
1212 for p
in evtPset.parameters_():
1213 setattr(output,p,getattr(evtPset,p))
1214 if not self._options.inlineEventContent:
1215 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1217 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1219 output.outputCommands = stream.content
1222 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1224 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1225 filterName = cms.untracked.string(stream.name))
1227 if self._options.filtername:
1228 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1231 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1233 if workflow
in (
"producers,full"):
1234 if isinstance(stream.paths,tuple):
1235 for path
in stream.paths:
1236 self.schedule.
append(path)
1238 self.schedule.
append(stream.paths)
1242 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1243 self.additionalOutputs[name] = output
1244 setattr(self.process,name,output)
1246 if workflow ==
'output':
1248 filterList = output.SelectEvents.SelectEvents
1249 for i, filter
in enumerate(filterList):
1250 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1260 _dotsplit = stepSpec.split(
'.')
1261 if ( len(_dotsplit)==1 ):
1262 if '/' in _dotsplit[0]:
1263 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1266 _sequence = stepSpec
1268 elif ( len(_dotsplit)==2 ):
1269 _cff,_sequence = _dotsplit
1271 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1272 print(stepSpec,
"not recognized")
1275 return l,_sequence,_cff
1280 for i,s
in enumerate(seq.split(
'*')):
1282 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1284 p=getattr(self.
process,prefix)
1285 tmp = getattr(self.
process, s)
1286 if isinstance(tmp, cms.Task):
1297 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1300 for i,s
in enumerate(seq.split(
'+')):
1302 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1316 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1317 """ Enrich the process with alca streams """ 1322 alcaList = sequence.split(
"+")
1323 for alca
in alcaList:
1324 if (len(alca)>MAXLEN):
1325 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1328 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1332 for name
in alcaConfig.__dict__:
1333 alcastream = getattr(alcaConfig,name)
1334 shortName = name.replace(
'ALCARECOStream',
'')
1335 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1336 if shortName
in AlCaNoConcurrentLumis:
1337 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1340 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1341 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1343 if 'DQM' in alcaList:
1344 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1345 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1347 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1351 if isinstance(alcastream.paths,tuple):
1352 for path
in alcastream.paths:
1357 for i
in range(alcaList.count(shortName)):
1358 alcaList.remove(shortName)
1361 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1362 path = getattr(alcaConfig,name)
1364 alcaList.remove(
'DQM')
1366 if isinstance(alcastream,cms.Path):
1371 if len(alcaList) != 0:
1373 for name
in alcaConfig.__dict__:
1374 alcastream = getattr(alcaConfig,name)
1375 if isinstance(alcastream,cms.FilteredStream):
1376 available.append(name.replace(
'ALCARECOStream',
''))
1377 print(
"The following alcas could not be found "+
str(alcaList))
1378 print(
"available ",available)
1380 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1385 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1386 print(
"Loading lhe fragment from",loadFragment)
1387 __import__(loadFragment)
1388 self.process.
load(loadFragment)
1390 self._options.inlineObjects+=
','+stepSpec
1392 getattr(self.process,stepSpec).nEvents = self._options.number
1395 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1396 self.excludedPaths.
append(
"lhe_step")
1397 self.schedule.
append( self.process.lhe_step )
1400 """ load the fragment of generator configuration """ 1407 if not '/' in loadFragment:
1408 loadFragment=
'Configuration.Generator.'+loadFragment
1410 loadFragment=loadFragment.replace(
'/',
'.')
1412 print(
"Loading generator fragment from",loadFragment)
1413 __import__(loadFragment)
1418 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1421 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1423 generatorModule=sys.modules[loadFragment]
1424 genModules=generatorModule.__dict__
1435 import FWCore.ParameterSet.Modules
as cmstypes
1436 for name
in genModules:
1437 theObject = getattr(generatorModule,name)
1438 if isinstance(theObject, cmstypes._Module):
1440 if theObject.type_()
in noConcurrentLumiGenerators:
1441 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1444 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1445 self.
_options.inlineObjects+=
','+name
1447 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly':
1448 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1450 elif 'generator' in genModules:
1453 """ Enrich the schedule with the rest of the generation step """ 1458 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1459 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1462 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1464 if self.
_options.scenario ==
'HeavyIons':
1465 if self.
_options.pileup==
'HiMixGEN':
1466 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1467 elif self.
_options.pileup==
'HiMixEmbGEN':
1468 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1470 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1472 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1476 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1478 if 'reGEN' in self.
stepMap or stepSpec ==
'pgen_smear':
1482 """ Enrich the schedule with the summary of the filter step """ 1489 """ Enrich the schedule with the simulation step""" 1499 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1505 """ Enrich the schedule with the digitisation step""" 1509 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1511 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1512 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1514 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1515 if self.
_options.inputEventContent==
'':
1516 self.
_options.inputEventContent=
'REGEN' 1525 """ Enrich the schedule with the crossing frame writer step""" 1531 """ Enrich the schedule with the digitisation step""" 1537 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1539 elif self.
_options.pileup_input.startswith(
"filelist:"):
1542 theFiles=self.
_options.pileup_input.split(
',')
1544 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1560 menuPath = f
'L1Trigger.Configuration.Phase2GTMenus.{menuFile}' 1561 menuModule = importlib.import_module(menuPath)
1563 theMenu = menuModule.menu
1566 for triggerPathFile
in theMenu:
1569 triggerPathModule = importlib.import_module(triggerPathFile)
1570 for objName
in dir(triggerPathModule):
1571 obj = getattr(triggerPathModule, objName)
1573 if objType == cms.Path:
1574 triggerPaths.append(objName)
1576 triggerScheduleList = [getattr(self.
process, name)
for name
in triggerPaths]
1577 self.
schedule.extend(triggerScheduleList)
1582 """ Run the GT emulation sequence on top of the L1 emulation step """ 1585 self.
scheduleSequence(
'l1tGTAlgoBlockProducerSequence',
'Phase2L1GTAlgoBlockProducer')
1586 if stepSpec ==
None:
1587 defaultMenuFile =
"prototype_2023_v1_0_0" 1593 """ Enrich the schedule with the L1 simulation step""" 1600 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1601 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1602 if stepSpec
in supported:
1603 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1604 if self.
_options.scenario ==
'HeavyIons':
1608 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1612 """ Enrich the schedule with the HLT simulation step""" 1614 print(
"no specification of the hlt menu has been given, should never happen")
1615 raise Exception(
'no HLT specifications provided')
1619 from Configuration.HLT.autoHLT
import autoHLT
1622 stepSpec = autoHLT[key]
1624 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1630 if self.
_options.scenario ==
'HeavyIons':
1631 optionsForHLT[
'type'] =
'HIon' 1633 optionsForHLT[
'type'] =
'GRun' 1634 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1635 if stepSpec ==
'run,fromSource':
1636 if hasattr(self.
process.source,
'firstRun'):
1637 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1638 elif hasattr(self.
process.source,
'setRunNumber'):
1639 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1641 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1643 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1648 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1654 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1657 if self.
process.schedule ==
None:
1658 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1665 if not hasattr(self.
process,
'HLTEndSequence'):
1666 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1671 seqReco,seqDigi=stepSpec.spli(
',')
1673 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1687 for filt
in allMetFilterPaths:
1691 ''' Enrich the schedule with L1 HW validation ''' 1693 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1697 ''' Enrich the schedule with L1 reconstruction ''' 1703 ''' Enrich the schedule with L1 reconstruction ''' 1709 ''' Enrich the schedule with a user defined filter sequence ''' 1711 filterConfig,filterSeq = stepSpec.split(
'.')
1712 filterConfig=self.
load(filterConfig)
1714 class PrintAllModules(
object):
1718 def enter(self,visitee):
1720 label=visitee.label()
1725 def leave(self,v):
pass 1727 expander=PrintAllModules()
1728 getattr(self.
process,filterSeq).visit( expander )
1729 self.
_options.inlineObjects+=
','+expander.inliner
1730 self.
_options.inlineObjects+=
','+filterSeq
1741 ''' Enrich the schedule with reconstruction ''' 1747 ''' Enrich the schedule with reconstruction ''' 1753 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1755 print(
"ERROR: this step is only implemented for FastSim")
1762 ''' Enrich the schedule with PAT ''' 1767 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1770 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1772 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1775 if len(self.
_options.customise_commands) > 1:
1776 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1777 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1778 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1779 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1786 ''' Enrich the schedule with PATGEN ''' 1790 raise Exception(
"PATGEN step can only run on MC")
1795 ''' Enrich the schedule with NANO ''' 1796 if not '@' in stepSpec:
1804 from PhysicsTools.NanoAOD.autoNANO
import autoNANO, expandNanoMapping
1806 _nanoCustoms = _nanoSeq.split(
'+')
if '@' in stepSpec
else [
'']
1807 _nanoSeq = _nanoSeq.split(
'+')
1811 _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1812 _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1814 _nanoSeq = [seq
if seq!=
'' else f
"{self.NANODefaultCFF}.{self.NANODefaultSeq}" for seq
in _nanoSeq]
1815 _nanoCustoms = [cust
if cust!=
'' else self.
NANODefaultCustom for cust
in _nanoCustoms]
1817 if len(_nanoSeq) < 1
and '@' in stepSpec:
1818 raise Exception(f
'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mapping')
1820 for _subSeq
in _nanoSeq:
1822 _cff,_seq = _subSeq.split(
'.')
1823 print(
"NANO: scheduling:",_seq,
"from",_cff)
1825 _seqToSchedule.append(_seq)
1826 elif '/' in _subSeq:
1830 print(
"NANO: scheduling:",_subSeq)
1831 _seqToSchedule.append(_subSeq)
1835 for custom
in _nanoCustoms:
1836 custom_path = custom
if '.' in custom
else '.'.
join([_nanoCff,custom])
1838 self.
_options.customisation_file.append(custom_path)
1840 if len(self.
_options.customise_commands) > 1:
1841 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1842 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1845 ''' Enrich the schedule with skimming fragments''' 1848 stdHLTProcName =
'HLT' 1849 newHLTProcName = self.
_options.hltProcess
1850 customiseForReHLT = (newHLTProcName
or (stdHLTProcName
in self.
stepMap))
and (newHLTProcName != stdHLTProcName)
1851 if customiseForReHLT:
1852 print(
"replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1855 from Configuration.Skimming.autoSkim
import autoSkim
1856 skimlist = sequence.split(
'+')
1860 for skim
in skimConfig.__dict__:
1861 skimstream = getattr(skimConfig, skim)
1864 if isinstance(skimstream, cms.Path):
1867 elif isinstance(skimstream, cms.Sequence):
1868 if customiseForReHLT:
1871 if not isinstance(skimstream, cms.FilteredStream):
1874 shortname = skim.replace(
'SKIMStream',
'')
1875 if (sequence==
"all"):
1877 elif (shortname
in skimlist):
1882 skimstreamDQM = cms.FilteredStream(
1883 responsible = skimstream.responsible,
1884 name = skimstream.name+
'DQM',
1885 paths = skimstream.paths,
1886 selectEvents = skimstream.selectEvents,
1887 content = self.
_options.datatier+
'EventContent',
1888 dataTier = cms.untracked.string(self.
_options.datatier)
1891 for i
in range(skimlist.count(shortname)):
1892 skimlist.remove(shortname)
1894 if (skimlist.__len__()!=0
and sequence!=
"all"):
1895 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1896 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1900 ''' Enrich the schedule with a user defined sequence ''' 1906 """ Enrich the schedule with the postreco step """ 1913 print(f
"{stepSpec} in preparing validation")
1915 from Validation.Configuration.autoValidation
import autoValidation
1917 if sequence.find(
',')!=-1:
1918 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1919 valSeqName=sequence.split(
',')[1].
split(
'+')
1924 prevalSeqName=sequence.split(
'+')
1925 valSeqName=sequence.split(
'+')
1931 postfix=
'_'+sequence
1932 prevalSeqName=[
'prevalidation'+postfix]
1933 valSeqName=[
'validation'+postfix]
1934 if not hasattr(self.
process,valSeqName[0]):
1936 valSeqName=[sequence]
1948 for s
in valSeqName+prevalSeqName:
1951 for (i,s)
in enumerate(prevalSeqName):
1953 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1956 for (i,s)
in enumerate(valSeqName):
1957 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1964 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1965 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1972 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1974 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1976 for (i,s)
in enumerate(valSeqName):
1983 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1984 It will climb down within PSets, VPSets and VInputTags to find its target""" 1985 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1991 def doIt(self, pset, base):
1992 if isinstance(pset, cms._Parameterizable):
1993 for name
in pset.parameters_().
keys():
1999 value = getattr(pset, name)
2000 valueType = type(value)
2001 if valueType
in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
2002 self.
doIt(value,base+
"."+name)
2003 elif valueType
in [cms.VPSet, cms.untracked.VPSet]:
2004 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
2005 elif valueType
in [cms.string, cms.untracked.string]:
2009 elif valueType
in [cms.VInputTag, cms.untracked.VInputTag]:
2010 for (i,n)
in enumerate(value):
2011 if not isinstance(n, cms.InputTag):
2018 elif valueType
in [cms.vstring, cms.untracked.vstring]:
2019 for (i,n)
in enumerate(value):
2022 elif valueType
in [cms.InputTag, cms.untracked.InputTag]:
2025 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
2027 def enter(self,visitee):
2030 label = visitee.label()
2031 except AttributeError:
2032 label =
'<Module not in a Process>' 2034 label =
'other execption' 2035 self.
doIt(visitee, label)
2037 def leave(self,visitee):
2042 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
2045 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 2048 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2054 if proc == HLTprocess:
2057 if verbosityLevel > 0:
2058 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2059 verboseVisit = (verbosityLevel > 1)
2060 getattr(self.
process,sequence).visit(
2062 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
2065 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))' 2066 % (sequence, HLTprocess, proc, verboseVisit))
2071 while '@' in repr(seqList)
and level<maxLevel:
2073 for specifiedCommand
in seqList:
2074 if specifiedCommand.startswith(
'@'):
2075 location=specifiedCommand[1:]
2076 if not location
in mapping:
2077 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
2078 mappedTo=mapping[location]
2080 mappedTo=mappedTo[index]
2081 seqList.remove(specifiedCommand)
2082 seqList.extend(mappedTo.split(
'+'))
2085 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
2093 sequenceList=_dqmSeq.split(
'+')
2094 postSequenceList=_dqmSeq.split(
'+')
2095 from DQMOffline.Configuration.autoDQM
import autoDQM
2099 if len(set(sequenceList))!=len(sequenceList):
2101 print(
"Duplicate entries for DQM:, using",sequenceList)
2103 pathName=
'dqmoffline_step' 2104 for (i,_sequence)
in enumerate(sequenceList):
2106 pathName=
'dqmoffline_%d_step'%(i)
2111 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
2114 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2119 pathName=
'dqmofflineOnPAT_step' 2120 for (i,_sequence)
in enumerate(postSequenceList):
2122 if (sequenceList[i]==postSequenceList[i]):
2125 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2127 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2131 """ Enrich the process with harvesting step """ 2132 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2138 harvestingList = sequence.split(
"+")
2139 from DQMOffline.Configuration.autoDQM
import autoDQM
2140 from Validation.Configuration.autoValidation
import autoValidation
2142 combined_mapping = copy.deepcopy( autoDQM )
2143 combined_mapping.update( autoValidation )
2144 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2146 if len(set(harvestingList))!=len(harvestingList):
2147 harvestingList=list(
OrderedSet(harvestingList))
2148 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2150 for name
in harvestingList:
2151 if not name
in harvestingConfig.__dict__:
2152 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2156 harvestingstream = getattr(harvestingConfig,name)
2157 if isinstance(harvestingstream,cms.Path):
2160 if isinstance(harvestingstream,cms.Sequence):
2161 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2172 """ Enrich the process with AlCaHarvesting step """ 2174 sequence=stepSpec.split(
".")[-1]
2177 harvestingList = sequence.split(
"+")
2181 from Configuration.AlCa.autoPCL
import autoPCL
2184 for name
in harvestingConfig.__dict__:
2185 harvestingstream = getattr(harvestingConfig,name)
2186 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2188 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2189 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2190 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2191 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2193 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2194 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2195 harvestingList.remove(name)
2197 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2200 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2201 print(
"The following harvesting could not be found : ", harvestingList)
2202 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2217 """ Add useful info for the production. """ 2218 self.
process.configurationMetadata=cms.untracked.PSet\
2219 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2220 name=cms.untracked.string(
"Applications"),
2221 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2229 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2231 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2240 from Configuration.StandardSequences.Eras
import eras
2241 for requestedEra
in self.
_options.era.split(
",") :
2242 modifierStrings.append(requestedEra)
2243 modifierImports.append(eras.pythonCfgLines[requestedEra])
2244 modifiers.append(getattr(eras,requestedEra))
2250 for c
in self.
_options.procModifiers:
2251 thingsImported.extend(c.split(
","))
2252 for pm
in thingsImported:
2253 modifierStrings.append(pm)
2254 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2255 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2261 if len(modifierStrings)>0:
2268 if len(modifiers)>0:
2276 def prepare(self, doChecking = False):
2277 """ Prepare the configuration string and add missing pieces.""" 2289 outputModuleCfgCode=
"" 2295 self.
pythonCfgCode +=
"# import of standard configurations\n" 2300 if not hasattr(self.
process,
"configurationMetadata"):
2322 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2323 tmpOut = cms.EndPath(output)
2324 setattr(self.
process,name+
'OutPath',tmpOut)
2333 for object
in self.
_options.inlineObjects.split(
','):
2336 if not hasattr(self.
process,object):
2337 print(
'cannot inline -'+object+
'- : not known')
2342 if self.
_options.pileup==
'HiMixEmbGEN':
2343 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2347 for path
in self.
process.paths:
2351 for endpath
in self.
process.endpaths:
2359 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2360 if self.
process.schedule ==
None:
2361 self.
process.schedule = cms.Schedule()
2363 self.
process.schedule.append(item)
2364 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2367 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2369 for index, item
in enumerate(self.
schedule):
2371 self.
process.schedule.insert(index, item)
2373 self.
process.schedule.append(item)
2375 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2377 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2384 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2385 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2389 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2392 overrideThreads = (self.
_options.nThreads != 1)
2393 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2394 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2396 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2404 if overrideConcurrentLumis:
2406 self.
process.options.numberOfConcurrentLuminosityBlocks = self.
_options.nConcurrentLumis
2407 if overrideConcurrentIOVs:
2409 self.
process.options.eventSetup.numberOfConcurrentIOVs = self.
_options.nConcurrentIOVs
2411 if self.
_options.accelerators
is not None:
2412 accelerators = self.
_options.accelerators.split(
',')
2414 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2415 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2416 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2417 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2418 self.
process.options.accelerators = accelerators
2423 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2424 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2425 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2429 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2437 for path
in self.
process.paths:
2447 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2456 if hasattr(self.
process,
"logErrorHarvester"):
2458 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2459 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2460 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2461 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2468 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2469 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2470 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2472 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2475 imports = cms.specialImportRegistry.getSpecialImports()
2476 if len(imports) > 0:
2488 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2491 if hasattr(self.
process.source,
"fileNames"):
2492 if len(self.
process.source.fileNames.value()):
2493 ioJson[
'primary']=self.
process.source.fileNames.value()
2494 if hasattr(self.
process.source,
"secondaryFileNames"):
2495 if len(self.
process.source.secondaryFileNames.value()):
2496 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2497 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2498 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2500 ioJson[o]=om.fileName.value()
2501 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2505 io.write(json.dumps(ioJson))
2508 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
ALPAKA_FN_HOST_ACC ALPAKA_FN_INLINE constexpr float zip(ConstView const &tracks, int32_t i)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_L1P2GT(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def heapProfileOptions(self)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule