# CVS keyword-expanded metadata identifying this configuration module.
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

# CMS configuration framework; provides cms.Source, cms.OutputModule,
# cms.untracked.*, etc. used throughout this module.
import FWCore.ParameterSet.Config as cms
# presumably the common base type of framework modules, used for
# isinstance() checks further down — TODO confirm against FWCore docs.
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
# Popen/PIPE are used below to shell out to `dasgoclient` and capture its output.
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
from FWCore.ParameterSet.OrderedSet import OrderedSet
# NOTE(review): the original file's numbering shows additional import lines
# (e.g. originals 5, 8-9, 11-15, 19-23) were dropped from this excerpt —
# names such as `sys`, `os`, `re`, `copy` and `defaultOptions` are defined
# there; verify against the full source before relying on this header alone.
# ---------------------------------------------------------------------------
# Default values for every cmsDriver/ConfigBuilder command-line option.
# `defaultOptions` itself is created earlier in the original file (not
# visible in this excerpt); each attribute below is the value used when the
# corresponding option is not supplied on the command line.  The statement
# order is preserved exactly as in the original.
# ---------------------------------------------------------------------------

# Sample-type flags: data/MC mixing scheme and which kind of sample this is.
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True

# Processing steps and pileup configuration.
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# Geometry and magnetic-field selection.
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''

# Conditions (GlobalTag) and allowed running scenarios.
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']

defaultOptions.harvesting='AtRunEnd'
defaultOptions.gflash = False

# Event counts: -1 means "all events" by cmsDriver convention — TODO confirm.
defaultOptions.number = -1
defaultOptions.number_out = None

defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""

# Input sources: plain files, DAS queries, and secondary (parent) files.
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""

# Customisation hooks: files with functions to apply, and inline commands.
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False

# Particle-data-table choice and the list of accepted values.
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']

# Input/output directories, file type and names.
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''

defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''

# Output definition: event content, data tier, and inlining behaviour.
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjects = ''
defaultOptions.hideGen=False

# Import kept in its original mid-module position (the beamspot default
# depends on keys defined by this module elsewhere in the file).
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None

defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None

# Profiling (igprof / jemalloc heap) options consumed by the profile helpers
# further down in the file.
defaultOptions.profile = None
defaultOptions.heap_profile = None

defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False

# MC run/weight assignment scenarios (float- and integer-weight variants).
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None

defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False

# Framework concurrency settings; 0 lets the framework pick — TODO confirm.
defaultOptions.nThreads = 1
defaultOptions.nStreams = 0
defaultOptions.nConcurrentLumis = 0
defaultOptions.nConcurrentIOVs = 0
defaultOptions.accelerators = None

# NOTE(review): the statement below is an orphaned fragment of a function
# body (original line 97) whose `def` line (original line 96) was lost in
# this paste — at module level it would raise NameError on `process`/`name`.
# Restore the enclosing function definition from the original file.
theObject = getattr(process,name)
98 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
99 return "process."+name+
" = " + theObject.dumpPython()
100 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
101 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 103 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 106 import FWCore.ParameterSet.Config
as cms
109 for line
in open(fileName,
'r'): 110 if line.count(
".root")>=2:
112 entries=line.replace(
"\n",
"").
split()
113 prim.append(entries[0])
114 sec.append(entries[1])
115 elif (line.find(
".root")!=-1):
116 entry=line.replace(
"\n",
"")
120 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
122 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
124 if not hasattr(s,
"fileNames"):
125 s.fileNames=cms.untracked.vstring(prim)
127 s.fileNames.extend(prim)
129 if not hasattr(s,
"secondaryFileNames"):
130 s.secondaryFileNames=cms.untracked.vstring(sec)
132 s.secondaryFileNames.extend(sec)
133 print(
"found files: ",prim)
135 raise Exception(
"There are not files in input from the file list")
137 print(
"found parent files:",sec)
142 import FWCore.ParameterSet.Config
as cms
145 print(
"the query is",query)
148 while eC!=0
and count<3:
150 print(
'Sleeping, then retrying DAS')
152 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
154 tupleP = os.waitpid(p.pid, 0)
158 print(
"DAS succeeded after",count,
"attempts",eC)
160 print(
"DAS failed 3 times- I give up")
161 for line
in pipe.split(
'\n'):
162 if line.count(
".root")>=2:
164 entries=line.replace(
"\n",
"").
split()
165 prim.append(entries[0])
166 sec.append(entries[1])
167 elif (line.find(
".root")!=-1):
168 entry=line.replace(
"\n",
"")
171 prim = sorted(list(set(prim)))
172 sec = sorted(list(set(sec)))
174 if not hasattr(s,
"fileNames"):
175 s.fileNames=cms.untracked.vstring(prim)
177 s.fileNames.extend(prim)
179 if not hasattr(s,
"secondaryFileNames"):
180 s.secondaryFileNames=cms.untracked.vstring(sec)
182 s.secondaryFileNames.extend(sec)
183 print(
"found files: ",prim)
185 print(
"found parent files:",sec)
188 def anyOf(listOfKeys,dict,opt=None):
197 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
200 """The main building routines """ 202 def __init__(self, options, process = None, with_output = False, with_input = False ):
203 """options taken from old cmsDriver and optparse """ 205 options.outfile_name = options.dirout+options.fileout
209 if self.
_options.isData
and options.isMC:
210 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
216 if (hasattr(self.
_options,
"outputDefinition")
and \
217 self.
_options.outputDefinition !=
'' and \
218 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
219 (hasattr(self.
_options,
"datatier")
and \
222 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
228 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
231 for step
in self.
_options.step.split(
","):
232 if step==
'':
continue 233 stepParts = step.split(
":")
234 stepName = stepParts[0]
235 if stepName
not in stepList
and not stepName.startswith(
're'):
236 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
237 if len(stepParts)==1:
239 elif len(stepParts)==2:
241 elif len(stepParts)==3:
242 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
244 raise ValueError(f
"Step definition {step} invalid")
252 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
280 Function to add the igprof profile service so that you can dump in the middle 283 profileOpts = self.
_options.profile.split(
':')
285 profilerInterval = 100
286 profilerFormat =
None 287 profilerJobFormat =
None 293 startEvent = profileOpts.pop(0)
294 if not startEvent.isdigit():
295 raise Exception(
"%s is not a number" % startEvent)
296 profilerStart =
int(startEvent)
298 eventInterval = profileOpts.pop(0)
299 if not eventInterval.isdigit():
300 raise Exception(
"%s is not a number" % eventInterval)
301 profilerInterval =
int(eventInterval)
303 profilerFormat = profileOpts.pop(0)
306 if not profilerFormat:
307 profilerFormat =
"%s___%s___%%I.gz" % (
308 self.
_options.evt_type.replace(
"_cfi",
""),
314 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
315 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
316 elif not profilerJobFormat:
317 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 319 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
324 Function to add the jemalloc heap profile service so that you can dump in the middle 327 profileOpts = self.
_options.profile.split(
':')
329 profilerInterval = 100
330 profilerFormat =
None 331 profilerJobFormat =
None 337 startEvent = profileOpts.pop(0)
338 if not startEvent.isdigit():
339 raise Exception(
"%s is not a number" % startEvent)
340 profilerStart =
int(startEvent)
342 eventInterval = profileOpts.pop(0)
343 if not eventInterval.isdigit():
344 raise Exception(
"%s is not a number" % eventInterval)
345 profilerInterval =
int(eventInterval)
347 profilerFormat = profileOpts.pop(0)
350 if not profilerFormat:
351 profilerFormat =
"%s___%s___%%I.heap" % (
352 self.
_options.evt_type.replace(
"_cfi",
""),
358 if not profilerJobFormat
and profilerFormat.endswith(
".heap"):
359 profilerJobFormat = profilerFormat.replace(
".heap",
"_EndOfJob.heap")
360 elif not profilerJobFormat:
361 profilerJobFormat = profilerFormat +
"_EndOfJob.heap" 363 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
366 includeFile = includeFile.replace(
'/',
'.')
368 return sys.modules[includeFile]
371 """helper routine to load am memorize imports""" 374 includeFile = includeFile.replace(
'/',
'.')
377 return sys.modules[includeFile]
380 """helper routine to remember replace statements""" 382 if not command.strip().startswith(
"#"):
385 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
390 self.
process.options.Rethrow = [
'ProductNotFound']
391 self.
process.options.fileMode =
'FULLMERGE' 396 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
397 stats = cms.untracked.bool(
True),
398 enable = cms.untracked.bool(
True),
399 cacheHint = cms.untracked.string(
"lazy-download"),
400 readHint = cms.untracked.string(
"read-ahead-buffered")
409 self.
process.IgProfService = cms.Service(
"IgProfService",
410 reportFirstEvent = cms.untracked.int32(start),
411 reportEventInterval = cms.untracked.int32(interval),
412 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
413 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
418 self.
process.JeProfService = cms.Service(
"JeProfService",
419 reportFirstEvent = cms.untracked.int32(start),
420 reportEventInterval = cms.untracked.int32(interval),
421 reportToFileAtPostEvent = cms.untracked.string(
"%s"%(eventFormat)),
422 reportToFileAtPostEndJob = cms.untracked.string(
"%s"%(jobFormat)))
426 """Here we decide how many evts will be processed""" 433 """Here the source is built. Priority: file, generator""" 436 def filesFromOption(self):
437 for entry
in self.
_options.filein.split(
','):
439 if entry.startswith(
"filelist:"):
441 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
446 if not hasattr(self.
process.source,
"secondaryFileNames"):
447 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
448 for entry
in self.
_options.secondfilein.split(
','):
450 if entry.startswith(
"filelist:"):
452 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
455 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
459 self.
process.source=cms.Source(
"PoolSource",
460 fileNames = cms.untracked.vstring(),
461 secondaryFileNames= cms.untracked.vstring())
462 filesFromOption(self)
463 elif self.
_options.filetype ==
"DAT":
464 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
465 filesFromOption(self)
466 elif self.
_options.filetype ==
"LHE":
467 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
468 if self.
_options.filein.startswith(
"lhe:"):
470 args=self.
_options.filein.split(
':')
472 print(
'LHE input from article ',article)
473 location=
'/store/lhe/' 475 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
476 for line
in textOfFiles:
477 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
478 self.
process.source.fileNames.append(location+article+
'/'+fileName)
481 print(
'Issue to load LHE files, please check and try again.')
484 if len(self.
process.source.fileNames)==0:
485 print(
'Issue with empty filename, but can pass line check')
488 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
490 filesFromOption(self)
492 elif self.
_options.filetype ==
"DQM":
493 self.
process.source=cms.Source(
"DQMRootSource",
494 fileNames = cms.untracked.vstring())
495 filesFromOption(self)
497 elif self.
_options.filetype ==
"DQMDAQ":
499 self.
process.source=cms.Source(
"DQMStreamerReader")
503 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
506 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
510 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
515 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 517 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 520 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
521 for command
in self.
_options.inputCommands.split(
','):
523 command = command.strip()
524 if command==
'':
continue 525 self.
process.source.inputCommands.append(command)
526 if not self.
_options.dropDescendant:
527 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
530 import FWCore.PythonUtilities.LumiList
as LumiList
534 if self.
process.source
is None:
535 self.
process.source=cms.Source(
"EmptySource")
541 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
542 if self.
_options.runsAndWeightsForMC:
545 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
546 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
547 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
548 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
553 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
560 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
562 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
563 if self.
_options.runsAndWeightsForMCIntegerWeights:
566 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
567 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
568 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
569 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
575 raise Exception(
"--relval option required when using --runsAndWeightsInt")
577 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
578 total_events, events_per_job = self.
_options.relval.split(
',')
580 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
585 """ Add output module to the process """ 589 print(
"--datatier & --eventcontent options ignored")
592 outList = eval(self.
_options.outputDefinition)
593 for (id,outDefDict)
in enumerate(outList):
594 outDefDictStr=outDefDict.__str__()
595 if not isinstance(outDefDict,dict):
596 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
598 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
601 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
602 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
603 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
604 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
605 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
607 if not theModuleLabel:
608 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
609 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
610 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 612 for name
in tryNames:
613 if not hasattr(self.
process,name):
616 if not theModuleLabel:
617 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
619 defaultFileName=self.
_options.outfile_name
621 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
623 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
624 if not theFileName.endswith(
'.root'):
628 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
629 if theStreamType==
'DQMIO': theStreamType=
'DQM' 630 if theStreamType==
'ALL':
631 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
633 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
637 if theStreamType==
'ALCARECO' and not theFilterName:
638 theFilterName=
'StreamALCACombined' 641 CppType=
'PoolOutputModule' 643 CppType=
'TimeoutPoolOutputModule' 644 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 645 output = cms.OutputModule(CppType,
646 theEventContent.clone(),
647 fileName = cms.untracked.string(theFileName),
648 dataset = cms.untracked.PSet(
649 dataTier = cms.untracked.string(theTier),
650 filterName = cms.untracked.string(theFilterName))
652 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
653 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
654 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
655 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
657 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
660 if not hasattr(output,
'SelectEvents'):
661 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
663 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
666 if hasattr(self.
process,theModuleLabel):
667 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
669 setattr(self.
process,theModuleLabel,output)
670 outputModule=getattr(self.
process,theModuleLabel)
671 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
672 path=getattr(self.
process,theModuleLabel+
'_step')
675 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
676 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
677 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
678 if theExtraOutputCommands:
679 if not isinstance(theExtraOutputCommands,list):
680 raise Exception(
"extra ouput command in --option must be a list of strings")
681 if hasattr(self.
process,theStreamType+
"EventContent"):
682 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
684 outputModule.outputCommands.extend(theExtraOutputCommands)
686 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
691 streamTypes=self.
_options.eventcontent.split(
',')
692 tiers=self.
_options.datatier.split(
',')
693 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
694 raise Exception(
"number of event content arguments does not match number of datatier arguments")
700 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
701 if streamType==
'':
continue 702 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 703 if streamType==
'DQMIO': streamType=
'DQM' 704 eventContent=streamType
706 if streamType ==
"NANOEDMAOD" :
707 eventContent =
"NANOAOD" 708 elif streamType ==
"NANOEDMAODSIM" :
709 eventContent =
"NANOAODSIM" 710 theEventContent = getattr(self.
process, eventContent+
"EventContent")
712 theFileName=self.
_options.outfile_name
713 theFilterName=self.
_options.filtername
715 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
716 theFilterName=self.
_options.filtername
717 CppType=
'PoolOutputModule' 719 CppType=
'TimeoutPoolOutputModule' 720 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 721 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 722 output = cms.OutputModule(CppType,
724 fileName = cms.untracked.string(theFileName),
725 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
726 filterName = cms.untracked.string(theFilterName)
729 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
730 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
731 if hasattr(self.
process,
"filtering_step"):
732 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
734 if streamType==
'ALCARECO':
735 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
737 if "MINIAOD" in streamType:
738 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
741 outputModuleName=streamType+
'output' 742 setattr(self.
process,outputModuleName,output)
743 outputModule=getattr(self.
process,outputModuleName)
744 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
745 path=getattr(self.
process,outputModuleName+
'_step')
748 if self.
_options.outputCommands
and streamType!=
'DQM':
749 for evct
in self.
_options.outputCommands.split(
','):
750 if not evct:
continue 751 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
753 if not self.
_options.inlineEventContent:
754 tmpstreamType=streamType
755 if "NANOEDM" in tmpstreamType :
756 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
757 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
759 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
761 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
767 Add selected standard sequences to the process 771 pileupSpec=self.
_options.pileup.split(
',')[0]
774 pileups_without_input=[defaultOptions.pileup,
"Cosmics",
"default",
"HiMixNoPU",
None]
775 if self.
_options.pileup
not in pileups_without_input
and self.
_options.pileup_input==
None:
776 message =
"Pileup scenerio requires input files. Please add an appropriate --pileup_input option" 780 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
781 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
782 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
786 if '.' in pileupSpec:
787 mixingDict={
'file':pileupSpec}
788 elif pileupSpec.startswith(
'file:'):
789 mixingDict={
'file':pileupSpec[5:]}
792 mixingDict=copy.copy(Mixing[pileupSpec])
793 if len(self.
_options.pileup.split(
','))>1:
794 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
797 if 'file:' in pileupSpec:
800 print(
"inlining mixing module configuration")
805 mixingDict.pop(
'file')
808 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
810 elif self.
_options.pileup_input.startswith(
"filelist:"):
813 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
815 for command
in specialization:
817 if len(mixingDict)!=0:
818 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
839 stepSpec = self.
stepMap[stepName]
840 print(
"Step:", stepName,
"Spec:",stepSpec)
841 if stepName.startswith(
're'):
843 if stepName[2:]
not in self.
_options.donotDropOnInput:
844 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
845 stepName=stepName[2:]
847 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
848 elif isinstance(stepSpec, list):
849 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
850 elif isinstance(stepSpec, tuple):
851 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
853 raise ValueError(
"Invalid step definition")
855 if self.
_options.restoreRNDSeeds!=
False:
857 if self.
_options.restoreRNDSeeds==
True:
858 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
860 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
863 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 865 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 871 def dropSecondDropStar(iec):
881 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
882 for evct
in self.
_options.inputEventContent.split(
','):
883 if evct==
'':
continue 884 theEventContent = getattr(self.
process, evct+
"EventContent")
885 if hasattr(theEventContent,
'outputCommands'):
886 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
887 if hasattr(theEventContent,
'inputCommands'):
888 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
890 dropSecondDropStar(self.
process.source.inputCommands)
892 if not self.
_options.dropDescendant:
893 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
899 """Add conditions to the process""" 900 if not self.
_options.conditions:
return 902 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
903 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
904 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
907 from Configuration.AlCa.GlobalTag
import GlobalTag
914 """Include the customise code """ 918 for c
in self.
_options.customisation_file:
919 custOpt.extend(c.split(
","))
921 for c
in self.
_options.customisation_file_unsch:
922 custOpt.extend(c.split(
","))
928 raise Exception(
"more than . in the specification:"+opt)
929 fileName=opt.split(
'.')[0]
930 if opt.count(
'.')==0: rest=
'customise' 932 rest=opt.split(
'.')[1]
933 if rest==
'py': rest=
'customise' 935 if fileName
in custMap:
936 custMap[fileName].extend(rest.split(
'+'))
938 custMap[fileName]=rest.split(
'+')
943 final_snippet=
'\n# customisation of the process.\n' 947 allFcn.extend(custMap[opt])
949 if allFcn.count(fcn)!=1:
950 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
954 packageName = f.replace(
".py",
"").
replace(
"/",
".")
955 __import__(packageName)
956 package = sys.modules[packageName]
959 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
961 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 963 for line
in file(customiseFile,
'r'): 964 if "import FWCore.ParameterSet.Config" in line:
966 final_snippet += line
968 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
969 for fcn
in custMap[f]:
970 print(
"customising the process with",fcn,
"from",f)
971 if not hasattr(package,fcn):
973 raise Exception(
"config "+f+
" has no function "+fcn)
977 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
978 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
981 final_snippet +=
'\n# End of customisation functions\n' 987 final_snippet=
'\n# Customisation from command line\n' 988 if self.
_options.customise_commands:
990 for com
in self.
_options.customise_commands.split(
'\\n'):
993 final_snippet +=
'\n'+com
1004 if self.
_options.particleTable
not in defaultOptions.particleTableList:
1005 print(
'Invalid particle table provided. Options are:')
1006 print(defaultOptions.particleTable)
1014 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 1048 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1088 self.
_options.beamspot=VtxSmearedDefaultKey
1093 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1094 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1097 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1106 if self.
_options.scenario==
'cosmics':
1108 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1109 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1110 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1120 if self.
_options.scenario==
'HeavyIons':
1122 self.
_options.beamspot=VtxSmearedHIDefaultKey
1127 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1129 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1132 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1144 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1148 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1152 if 'start' in self.
_options.conditions.lower():
1153 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1155 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1157 def inGeometryKeys(opt):
1158 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1159 if opt
in GeometryConf:
1160 return GeometryConf[opt]
1164 geoms=self.
_options.geometry.split(
',')
1165 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1168 if '/' in geoms[1]
or '_cff' in geoms[1]:
1171 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1173 if (geoms[0].startswith(
'DB:')):
1174 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1178 if '/' in geoms[0]
or '_cff' in geoms[0]:
1181 simGeometry=geoms[0]
1183 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1185 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1188 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1189 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1192 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1197 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1198 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1205 if self.
_options.pileup==
'default':
1206 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1207 self.
_options.pileup=MixingDefaultKey
1220 output = cms.OutputModule(
"PoolOutputModule")
1221 if stream.selectEvents.parameters_().__len__()!=0:
1222 output.SelectEvents = stream.selectEvents
1224 output.SelectEvents = cms.untracked.PSet()
1225 output.SelectEvents.SelectEvents=cms.vstring()
1226 if isinstance(stream.paths,tuple):
1227 for path
in stream.paths:
1228 output.SelectEvents.SelectEvents.append(path.label())
1230 output.SelectEvents.SelectEvents.append(stream.paths.label())
1234 if isinstance(stream.content,str):
1235 evtPset=getattr(self.process,stream.content)
1236 for p
in evtPset.parameters_():
1237 setattr(output,p,getattr(evtPset,p))
1238 if not self._options.inlineEventContent:
1239 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1241 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1243 output.outputCommands = stream.content
1246 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1248 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1249 filterName = cms.untracked.string(stream.name))
1251 if self._options.filtername:
1252 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1255 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1257 if workflow
in (
"producers,full"):
1258 if isinstance(stream.paths,tuple):
1259 for path
in stream.paths:
1260 self.schedule.
append(path)
1262 self.schedule.
append(stream.paths)
1266 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1267 self.additionalOutputs[name] = output
1268 setattr(self.process,name,output)
1270 if workflow ==
'output':
1272 filterList = output.SelectEvents.SelectEvents
1273 for i, filter
in enumerate(filterList):
1274 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1284 _dotsplit = stepSpec.split(
'.')
1285 if ( len(_dotsplit)==1 ):
1286 if '/' in _dotsplit[0]:
1287 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1290 _sequence = stepSpec
1292 elif ( len(_dotsplit)==2 ):
1293 _cff,_sequence = _dotsplit
1295 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1296 print(stepSpec,
"not recognized")
1299 return l,_sequence,_cff
1304 for i,s
in enumerate(seq.split(
'*')):
1306 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1308 p=getattr(self.
process,prefix)
1309 tmp = getattr(self.
process, s)
1310 if isinstance(tmp, cms.Task):
1321 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1324 for i,s
in enumerate(seq.split(
'+')):
1326 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1340 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1341 """ Enrich the process with alca streams """ 1346 alcaList = sequence.split(
"+")
1347 for alca
in alcaList:
1348 if (len(alca)>MAXLEN):
1349 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1352 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1356 for name
in alcaConfig.__dict__:
1357 alcastream = getattr(alcaConfig,name)
1358 shortName = name.replace(
'ALCARECOStream',
'')
1359 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1360 if shortName
in AlCaNoConcurrentLumis:
1361 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1364 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1365 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1367 if 'DQM' in alcaList:
1368 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1369 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1371 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1375 if isinstance(alcastream.paths,tuple):
1376 for path
in alcastream.paths:
1381 for i
in range(alcaList.count(shortName)):
1382 alcaList.remove(shortName)
1385 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1386 path = getattr(alcaConfig,name)
1388 alcaList.remove(
'DQM')
1390 if isinstance(alcastream,cms.Path):
1395 if len(alcaList) != 0:
1397 for name
in alcaConfig.__dict__:
1398 alcastream = getattr(alcaConfig,name)
1399 if isinstance(alcastream,cms.FilteredStream):
1400 available.append(name.replace(
'ALCARECOStream',
''))
1401 print(
"The following alcas could not be found "+
str(alcaList))
1402 print(
"available ",available)
1404 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1409 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1410 print(
"Loading lhe fragment from",loadFragment)
1411 __import__(loadFragment)
1412 self.process.
load(loadFragment)
1414 self._options.inlineObjects+=
','+stepSpec
1416 getattr(self.process,stepSpec).nEvents = self._options.number
1419 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1420 self.excludedPaths.
append(
"lhe_step")
1421 self.schedule.
append( self.process.lhe_step )
1424 """ load the fragment of generator configuration """ 1431 if not '/' in loadFragment:
1432 loadFragment=
'Configuration.Generator.'+loadFragment
1434 loadFragment=loadFragment.replace(
'/',
'.')
1436 print(
"Loading generator fragment from",loadFragment)
1437 __import__(loadFragment)
1442 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1445 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1447 generatorModule=sys.modules[loadFragment]
1448 genModules=generatorModule.__dict__
1459 import FWCore.ParameterSet.Modules
as cmstypes
1460 for name
in genModules:
1461 theObject = getattr(generatorModule,name)
1462 if isinstance(theObject, cmstypes._Module):
1464 if theObject.type_()
in noConcurrentLumiGenerators:
1465 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1468 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1469 self.
_options.inlineObjects+=
','+name
1471 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly':
1472 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1474 elif 'generator' in genModules:
1477 """ Enrich the schedule with the rest of the generation step """ 1482 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1483 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1486 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1488 if self.
_options.scenario ==
'HeavyIons':
1489 if self.
_options.pileup==
'HiMixGEN':
1490 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1491 elif self.
_options.pileup==
'HiMixEmbGEN':
1492 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1494 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1496 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1500 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1502 if 'reGEN' in self.
stepMap or stepSpec ==
'pgen_smear':
1506 """ Enrich the schedule with the summary of the filter step """ 1513 """ Enrich the schedule with the simulation step""" 1523 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1529 """ Enrich the schedule with the digitisation step""" 1533 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1535 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1536 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1538 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1539 if self.
_options.inputEventContent==
'':
1540 self.
_options.inputEventContent=
'REGEN' 1549 """ Enrich the schedule with the crossing frame writer step""" 1555 """ Enrich the schedule with the digitisation step""" 1561 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1563 elif self.
_options.pileup_input.startswith(
"filelist:"):
1566 theFiles=self.
_options.pileup_input.split(
',')
1568 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1584 menuPath = f
'L1Trigger.Configuration.Phase2GTMenus.{menuFile}' 1585 menuModule = importlib.import_module(menuPath)
1587 theMenu = menuModule.menu
1590 for triggerPathFile
in theMenu:
1593 triggerPathModule = importlib.import_module(triggerPathFile)
1594 for objName
in dir(triggerPathModule):
1595 obj = getattr(triggerPathModule, objName)
1597 if objType == cms.Path:
1598 triggerPaths.append(objName)
1600 triggerScheduleList = [getattr(self.
process, name)
for name
in triggerPaths]
1601 self.
schedule.extend(triggerScheduleList)
1606 """ Run the GT emulation sequence on top of the L1 emulation step """ 1609 self.
scheduleSequence(
'l1tGTAlgoBlockProducerSequence',
'Phase2L1GTAlgoBlockProducer')
1610 if stepSpec ==
None:
1611 defaultMenuFile =
"prototype_2023_v1_0_0" 1617 """ Enrich the schedule with the L1 simulation step""" 1624 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1625 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1626 if stepSpec
in supported:
1627 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1628 if self.
_options.scenario ==
'HeavyIons':
1632 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1636 """ Enrich the schedule with the HLT simulation step""" 1638 print(
"no specification of the hlt menu has been given, should never happen")
1639 raise Exception(
'no HLT specifications provided')
1643 from Configuration.HLT.autoHLT
import autoHLT
1646 stepSpec = autoHLT[key]
1648 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1654 if self.
_options.scenario ==
'HeavyIons':
1655 optionsForHLT[
'type'] =
'HIon' 1657 optionsForHLT[
'type'] =
'GRun' 1658 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1659 if stepSpec ==
'run,fromSource':
1660 if hasattr(self.
process.source,
'firstRun'):
1661 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1662 elif hasattr(self.
process.source,
'setRunNumber'):
1663 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1665 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1667 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1672 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1678 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1681 if self.
process.schedule ==
None:
1682 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1689 if not hasattr(self.
process,
'HLTEndSequence'):
1690 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1695 seqReco,seqDigi=stepSpec.spli(
',')
1697 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1711 for filt
in allMetFilterPaths:
1715 ''' Enrich the schedule with L1 HW validation ''' 1717 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1721 ''' Enrich the schedule with L1 reconstruction ''' 1727 ''' Enrich the schedule with L1 reconstruction ''' 1733 ''' Enrich the schedule with a user defined filter sequence ''' 1735 filterConfig,filterSeq = stepSpec.split(
'.')
1736 filterConfig=self.
load(filterConfig)
1738 class PrintAllModules(
object):
1742 def enter(self,visitee):
1744 label=visitee.label()
1749 def leave(self,v):
pass 1751 expander=PrintAllModules()
1752 getattr(self.
process,filterSeq).visit( expander )
1753 self.
_options.inlineObjects+=
','+expander.inliner
1754 self.
_options.inlineObjects+=
','+filterSeq
1765 ''' Enrich the schedule with reconstruction ''' 1771 ''' Enrich the schedule with reconstruction ''' 1777 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1779 print(
"ERROR: this step is only implemented for FastSim")
1786 ''' Enrich the schedule with PAT ''' 1791 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1794 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1796 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1799 if len(self.
_options.customise_commands) > 1:
1800 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1801 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1802 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1803 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1810 ''' Enrich the schedule with PATGEN ''' 1814 raise Exception(
"PATGEN step can only run on MC")
1819 ''' Enrich the schedule with NANO ''' 1823 from PhysicsTools.NanoAOD.autoNANO
import autoNANO, expandNanoMapping
1825 _nanoCustoms = _nanoSeq.split(
'+')
if '@' in stepSpec
else [
'']
1826 _nanoSeq = _nanoSeq.split(
'+')
1830 _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1831 _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1833 _nanoSeq = [seq
if seq!=
'' else self.
NANODefaultSeq for seq
in _nanoSeq]
1834 _nanoCustoms = [cust
if cust!=
'' else self.
NANODefaultCustom for cust
in _nanoCustoms]
1836 if len(_nanoSeq) < 1
and '@' in stepSpec:
1837 raise Exception(f
'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mappign')
1839 for _subSeq
in _nanoSeq:
1841 _cff,_seq = _subSeq.split(
'.')
1843 _seqToSchedule.append(_seq)
1845 _seqToSchedule.append(_subSeq)
1849 for custom
in _nanoCustoms:
1850 custom_path = custom
if '.' in custom
else '.'.
join([_nanoCff,custom])
1852 self.
_options.customisation_file.append(custom_path)
1854 if len(self.
_options.customise_commands) > 1:
1855 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1856 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1859 ''' Enrich the schedule with NANOGEN ''' 1861 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1864 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1866 self.
_options.customisation_file_unsch.insert(0,
'.'.
join([_nanogenCff, custom]))
1868 self.
_options.customisation_file.insert(0,
'.'.
join([_nanogenCff, custom]))
1871 ''' Enrich the schedule with skimming fragments''' 1874 stdHLTProcName =
'HLT' 1875 newHLTProcName = self.
_options.hltProcess
1876 customiseForReHLT = (newHLTProcName
or (stdHLTProcName
in self.
stepMap))
and (newHLTProcName != stdHLTProcName)
1877 if customiseForReHLT:
1878 print(
"replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1881 from Configuration.Skimming.autoSkim
import autoSkim
1882 skimlist = sequence.split(
'+')
1886 for skim
in skimConfig.__dict__:
1887 skimstream = getattr(skimConfig, skim)
1890 if isinstance(skimstream, cms.Path):
1893 elif isinstance(skimstream, cms.Sequence):
1894 if customiseForReHLT:
1897 if not isinstance(skimstream, cms.FilteredStream):
1900 shortname = skim.replace(
'SKIMStream',
'')
1901 if (sequence==
"all"):
1903 elif (shortname
in skimlist):
1908 skimstreamDQM = cms.FilteredStream(
1909 responsible = skimstream.responsible,
1910 name = skimstream.name+
'DQM',
1911 paths = skimstream.paths,
1912 selectEvents = skimstream.selectEvents,
1913 content = self.
_options.datatier+
'EventContent',
1914 dataTier = cms.untracked.string(self.
_options.datatier)
1917 for i
in range(skimlist.count(shortname)):
1918 skimlist.remove(shortname)
1920 if (skimlist.__len__()!=0
and sequence!=
"all"):
1921 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1922 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1926 ''' Enrich the schedule with a user defined sequence ''' 1932 """ Enrich the schedule with the postreco step """ 1939 print(f
"{stepSpec} in preparing validation")
1941 from Validation.Configuration.autoValidation
import autoValidation
1943 if sequence.find(
',')!=-1:
1944 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1945 valSeqName=sequence.split(
',')[1].
split(
'+')
1950 prevalSeqName=sequence.split(
'+')
1951 valSeqName=sequence.split(
'+')
1957 postfix=
'_'+sequence
1958 prevalSeqName=[
'prevalidation'+postfix]
1959 valSeqName=[
'validation'+postfix]
1960 if not hasattr(self.
process,valSeqName[0]):
1962 valSeqName=[sequence]
1974 for s
in valSeqName+prevalSeqName:
1977 for (i,s)
in enumerate(prevalSeqName):
1979 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1982 for (i,s)
in enumerate(valSeqName):
1983 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1990 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1991 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1998 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
2000 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2002 for (i,s)
in enumerate(valSeqName):
2009 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 2010 It will climb down within PSets, VPSets and VInputTags to find its target""" 2011 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
2017 def doIt(self, pset, base):
2018 if isinstance(pset, cms._Parameterizable):
2019 for name
in pset.parameters_().
keys():
2025 value = getattr(pset, name)
2026 valueType = type(value)
2027 if valueType
in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
2028 self.
doIt(value,base+
"."+name)
2029 elif valueType
in [cms.VPSet, cms.untracked.VPSet]:
2030 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
2031 elif valueType
in [cms.string, cms.untracked.string]:
2035 elif valueType
in [cms.VInputTag, cms.untracked.VInputTag]:
2036 for (i,n)
in enumerate(value):
2037 if not isinstance(n, cms.InputTag):
2044 elif valueType
in [cms.vstring, cms.untracked.vstring]:
2045 for (i,n)
in enumerate(value):
2048 elif valueType
in [cms.InputTag, cms.untracked.InputTag]:
2051 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
2053 def enter(self,visitee):
2056 label = visitee.label()
2057 except AttributeError:
2058 label =
'<Module not in a Process>' 2060 label =
'other execption' 2061 self.
doIt(visitee, label)
2063 def leave(self,visitee):
2068 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
2071 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 2074 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2080 if proc == HLTprocess:
2083 if verbosityLevel > 0:
2084 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2085 verboseVisit = (verbosityLevel > 1)
2086 getattr(self.
process,sequence).visit(
2088 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
2091 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))' 2092 % (sequence, HLTprocess, proc, verboseVisit))
2097 while '@' in repr(seqList)
and level<maxLevel:
2099 for specifiedCommand
in seqList:
2100 if specifiedCommand.startswith(
'@'):
2101 location=specifiedCommand[1:]
2102 if not location
in mapping:
2103 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
2104 mappedTo=mapping[location]
2106 mappedTo=mappedTo[index]
2107 seqList.remove(specifiedCommand)
2108 seqList.extend(mappedTo.split(
'+'))
2111 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
2119 sequenceList=_dqmSeq.split(
'+')
2120 postSequenceList=_dqmSeq.split(
'+')
2121 from DQMOffline.Configuration.autoDQM
import autoDQM
2125 if len(set(sequenceList))!=len(sequenceList):
2127 print(
"Duplicate entries for DQM:, using",sequenceList)
2129 pathName=
'dqmoffline_step' 2130 for (i,_sequence)
in enumerate(sequenceList):
2132 pathName=
'dqmoffline_%d_step'%(i)
2137 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
2140 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2145 pathName=
'dqmofflineOnPAT_step' 2146 for (i,_sequence)
in enumerate(postSequenceList):
2148 if (sequenceList[i]==postSequenceList[i]):
2151 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2153 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2157 """ Enrich the process with harvesting step """ 2158 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2164 harvestingList = sequence.split(
"+")
2165 from DQMOffline.Configuration.autoDQM
import autoDQM
2166 from Validation.Configuration.autoValidation
import autoValidation
2168 combined_mapping = copy.deepcopy( autoDQM )
2169 combined_mapping.update( autoValidation )
2170 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2172 if len(set(harvestingList))!=len(harvestingList):
2173 harvestingList=list(
OrderedSet(harvestingList))
2174 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2176 for name
in harvestingList:
2177 if not name
in harvestingConfig.__dict__:
2178 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2182 harvestingstream = getattr(harvestingConfig,name)
2183 if isinstance(harvestingstream,cms.Path):
2186 if isinstance(harvestingstream,cms.Sequence):
2187 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2198 """ Enrich the process with AlCaHarvesting step """ 2200 sequence=stepSpec.split(
".")[-1]
2203 harvestingList = sequence.split(
"+")
2207 from Configuration.AlCa.autoPCL
import autoPCL
2210 for name
in harvestingConfig.__dict__:
2211 harvestingstream = getattr(harvestingConfig,name)
2212 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2214 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2215 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2216 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2217 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2219 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2220 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2221 harvestingList.remove(name)
2223 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2226 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2227 print(
"The following harvesting could not be found : ", harvestingList)
2228 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2243 """ Add useful info for the production. """ 2244 self.
process.configurationMetadata=cms.untracked.PSet\
2245 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2246 name=cms.untracked.string(
"Applications"),
2247 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2255 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2257 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2266 from Configuration.StandardSequences.Eras
import eras
2267 for requestedEra
in self.
_options.era.split(
",") :
2268 modifierStrings.append(requestedEra)
2269 modifierImports.append(eras.pythonCfgLines[requestedEra])
2270 modifiers.append(getattr(eras,requestedEra))
2276 for c
in self.
_options.procModifiers:
2277 thingsImported.extend(c.split(
","))
2278 for pm
in thingsImported:
2279 modifierStrings.append(pm)
2280 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2281 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2287 if len(modifierStrings)>0:
2294 if len(modifiers)>0:
2302 def prepare(self, doChecking = False):
2303 """ Prepare the configuration string and add missing pieces.""" 2315 outputModuleCfgCode=
"" 2321 self.
pythonCfgCode +=
"# import of standard configurations\n" 2326 if not hasattr(self.
process,
"configurationMetadata"):
2348 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2349 tmpOut = cms.EndPath(output)
2350 setattr(self.
process,name+
'OutPath',tmpOut)
2359 for object
in self.
_options.inlineObjects.split(
','):
2362 if not hasattr(self.
process,object):
2363 print(
'cannot inline -'+object+
'- : not known')
2368 if self.
_options.pileup==
'HiMixEmbGEN':
2369 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2373 for path
in self.
process.paths:
2377 for endpath
in self.
process.endpaths:
2385 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2386 if self.
process.schedule ==
None:
2387 self.
process.schedule = cms.Schedule()
2389 self.
process.schedule.append(item)
2390 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2393 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2395 for index, item
in enumerate(self.
schedule):
2397 self.
process.schedule.insert(index, item)
2399 self.
process.schedule.append(item)
2401 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2403 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2410 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2411 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2415 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2418 overrideThreads = (self.
_options.nThreads != 1)
2419 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2420 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2422 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2430 if overrideConcurrentLumis:
2432 self.
process.options.numberOfConcurrentLuminosityBlocks = self.
_options.nConcurrentLumis
2433 if overrideConcurrentIOVs:
2435 self.
process.options.eventSetup.numberOfConcurrentIOVs = self.
_options.nConcurrentIOVs
2437 if self.
_options.accelerators
is not None:
2438 accelerators = self.
_options.accelerators.split(
',')
2440 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2441 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2442 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2443 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2444 self.
process.options.accelerators = accelerators
2449 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2450 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2451 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2455 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2463 for path
in self.
process.paths:
2473 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2482 if hasattr(self.
process,
"logErrorHarvester"):
2484 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2485 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2486 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2487 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2494 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2495 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2496 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2498 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2501 imports = cms.specialImportRegistry.getSpecialImports()
2502 if len(imports) > 0:
2514 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2517 if hasattr(self.
process.source,
"fileNames"):
2518 if len(self.
process.source.fileNames.value()):
2519 ioJson[
'primary']=self.
process.source.fileNames.value()
2520 if hasattr(self.
process.source,
"secondaryFileNames"):
2521 if len(self.
process.source.secondaryFileNames.value()):
2522 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2523 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2524 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2526 ioJson[o]=om.fileName.value()
2527 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2531 io.write(json.dumps(ioJson))
2534 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
ALPAKA_FN_HOST_ACC ALPAKA_FN_INLINE constexpr float zip(ConstView const &tracks, int32_t i)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_L1P2GT(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def heapProfileOptions(self)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule