3 __version__ =
"$Revision: 1.19 $" 4 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 6 import FWCore.ParameterSet.Config
as cms
7 from FWCore.ParameterSet.Modules
import _Module
10 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
16 from subprocess
import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes
as DictTypes
18 from FWCore.ParameterSet.OrderedSet
import OrderedSet
# Default values for every cmsDriver option.  `defaultOptions` itself is an
# Options container declared earlier in this file; each attribute below is the
# value used when the corresponding command-line flag is not given.

# --- sample type / workflow steps -------------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''

# --- pileup mixing -----------------------------------------------------------
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# --- geometry / magnetic field / conditions ---------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False

# --- event counts / naming ---------------------------------------------------
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""

# --- input sources -----------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""

# --- customisation hooks -----------------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False

# --- particle table ----------------------------------------------------------
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']

# --- file handling / output --------------------------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjects = ''
defaultOptions.hideGen = False

from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey, VtxSmearedHIDefaultKey
defaultOptions.beamspot = None

defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None

# --- profiling / misc run control -------------------------------------------
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False

# --- MC run/weight assignment ------------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False

# --- framework concurrency ---------------------------------------------------
defaultOptions.nThreads = 1
defaultOptions.nStreams = 0
defaultOptions.nConcurrentLumis = 0
defaultOptions.nConcurrentIOVs = 0
defaultOptions.accelerators = None
98 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
99 return "process."+name+
" = " + theObject.dumpPython()
100 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
101 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 103 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 106 import FWCore.ParameterSet.Config
as cms
109 for line
in open(fileName,
'r'): 110 if line.count(
".root")>=2:
112 entries=line.replace(
"\n",
"").
split()
113 prim.append(entries[0])
114 sec.append(entries[1])
115 elif (line.find(
".root")!=-1):
116 entry=line.replace(
"\n",
"")
120 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
122 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
124 if not hasattr(s,
"fileNames"):
125 s.fileNames=cms.untracked.vstring(prim)
127 s.fileNames.extend(prim)
129 if not hasattr(s,
"secondaryFileNames"):
130 s.secondaryFileNames=cms.untracked.vstring(sec)
132 s.secondaryFileNames.extend(sec)
133 print(
"found files: ",prim)
135 raise Exception(
"There are not files in input from the file list")
137 print(
"found parent files:",sec)
142 import FWCore.ParameterSet.Config
as cms
145 print(
"the query is",query)
148 while eC!=0
and count<3:
150 print(
'Sleeping, then retrying DAS')
152 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
154 tupleP = os.waitpid(p.pid, 0)
158 print(
"DAS succeeded after",count,
"attempts",eC)
160 print(
"DAS failed 3 times- I give up")
161 for line
in pipe.split(
'\n'):
162 if line.count(
".root")>=2:
164 entries=line.replace(
"\n",
"").
split()
165 prim.append(entries[0])
166 sec.append(entries[1])
167 elif (line.find(
".root")!=-1):
168 entry=line.replace(
"\n",
"")
171 prim = sorted(list(set(prim)))
172 sec = sorted(list(set(sec)))
174 if not hasattr(s,
"fileNames"):
175 s.fileNames=cms.untracked.vstring(prim)
177 s.fileNames.extend(prim)
179 if not hasattr(s,
"secondaryFileNames"):
180 s.secondaryFileNames=cms.untracked.vstring(sec)
182 s.secondaryFileNames.extend(sec)
183 print(
"found files: ",prim)
185 print(
"found parent files:",sec)
def anyOf(listOfKeys, dict, opt=None):
    """Return the value of the first key in *listOfKeys* present in *dict*.

    The matched key is popped from *dict*, so the caller can afterwards
    inspect the dictionary for unused/unknown keys (see the --output
    handling, which reports leftover keys as an error).

    Parameters:
        listOfKeys: candidate key names, checked in order.
        dict: mapping to search; mutated (matched key removed).
              NOTE(review): the name shadows the builtin ``dict``; kept
              unchanged for backward compatibility with existing callers.
        opt: fallback value returned when none of the keys is present.

    Raises:
        Exception: when no key matches and no fallback was supplied.
    """
    for key in listOfKeys:
        if key in dict:
            value = dict.pop(key)  # pop so unused keys can be detected later
            return value
    # `is not None` (not `!= None`): an empty-string fallback is still valid
    if opt is not None:
        return opt
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
200 """The main building routines """ 202 def __init__(self, options, process = None, with_output = False, with_input = False ):
203 """options taken from old cmsDriver and optparse """ 205 options.outfile_name = options.dirout+options.fileout
209 if self.
_options.isData
and options.isMC:
210 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
216 if (hasattr(self.
_options,
"outputDefinition")
and \
217 self.
_options.outputDefinition !=
'' and \
218 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
219 (hasattr(self.
_options,
"datatier")
and \
222 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
228 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
231 for step
in self.
_options.step.split(
","):
232 if step==
'':
continue 233 stepParts = step.split(
":")
234 stepName = stepParts[0]
235 if stepName
not in stepList
and not stepName.startswith(
're'):
236 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
237 if len(stepParts)==1:
239 elif len(stepParts)==2:
241 elif len(stepParts)==3:
242 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
244 raise ValueError(f
"Step definition {step} invalid")
252 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
280 Function to add the igprof profile service so that you can dump in the middle 283 profileOpts = self.
_options.profile.split(
':')
285 profilerInterval = 100
286 profilerFormat =
None 287 profilerJobFormat =
None 293 startEvent = profileOpts.pop(0)
294 if not startEvent.isdigit():
295 raise Exception(
"%s is not a number" % startEvent)
296 profilerStart =
int(startEvent)
298 eventInterval = profileOpts.pop(0)
299 if not eventInterval.isdigit():
300 raise Exception(
"%s is not a number" % eventInterval)
301 profilerInterval =
int(eventInterval)
303 profilerFormat = profileOpts.pop(0)
306 if not profilerFormat:
307 profilerFormat =
"%s___%s___%%I.gz" % (
308 self.
_options.evt_type.replace(
"_cfi",
""),
314 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
315 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
316 elif not profilerJobFormat:
317 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 319 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
324 Function to add the jemalloc heap profile service so that you can dump in the middle 327 profileOpts = self.
_options.profile.split(
':')
329 profilerInterval = 100
330 profilerFormat =
None 331 profilerJobFormat =
None 337 startEvent = profileOpts.pop(0)
338 if not startEvent.isdigit():
339 raise Exception(
"%s is not a number" % startEvent)
340 profilerStart =
int(startEvent)
342 eventInterval = profileOpts.pop(0)
343 if not eventInterval.isdigit():
344 raise Exception(
"%s is not a number" % eventInterval)
345 profilerInterval =
int(eventInterval)
347 profilerFormat = profileOpts.pop(0)
350 if not profilerFormat:
351 profilerFormat =
"%s___%s___%%I.heap" % (
352 self.
_options.evt_type.replace(
"_cfi",
""),
358 if not profilerJobFormat
and profilerFormat.endswith(
".heap"):
359 profilerJobFormat = profilerFormat.replace(
".heap",
"_EndOfJob.heap")
360 elif not profilerJobFormat:
361 profilerJobFormat = profilerFormat +
"_EndOfJob.heap" 363 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
366 includeFile = includeFile.replace(
'/',
'.')
368 return sys.modules[includeFile]
371 """helper routine to load am memorize imports""" 374 includeFile = includeFile.replace(
'/',
'.')
377 return sys.modules[includeFile]
380 """helper routine to remember replace statements""" 382 if not command.strip().startswith(
"#"):
385 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
390 self.
process.options.Rethrow = [
'ProductNotFound']
391 self.
process.options.fileMode =
'FULLMERGE' 396 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
397 stats = cms.untracked.bool(
True),
398 enable = cms.untracked.bool(
True),
399 cacheHint = cms.untracked.string(
"lazy-download"),
400 readHint = cms.untracked.string(
"read-ahead-buffered")
409 self.
process.IgProfService = cms.Service(
"IgProfService",
410 reportFirstEvent = cms.untracked.int32(start),
411 reportEventInterval = cms.untracked.int32(interval),
412 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
413 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
418 self.
process.JeProfService = cms.Service(
"JeProfService",
419 reportFirstEvent = cms.untracked.int32(start),
420 reportEventInterval = cms.untracked.int32(interval),
421 reportToFileAtPostEvent = cms.untracked.string(
"%s"%(eventFormat)),
422 reportToFileAtPostEndJob = cms.untracked.string(
"%s"%(jobFormat)))
426 """Here we decide how many evts will be processed""" 433 """Here the source is built. Priority: file, generator""" 436 def filesFromOption(self):
437 for entry
in self.
_options.filein.split(
','):
439 if entry.startswith(
"filelist:"):
441 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
446 if not hasattr(self.
process.source,
"secondaryFileNames"):
447 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
448 for entry
in self.
_options.secondfilein.split(
','):
450 if entry.startswith(
"filelist:"):
452 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
455 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
459 self.
process.source=cms.Source(
"PoolSource",
460 fileNames = cms.untracked.vstring(),
461 secondaryFileNames= cms.untracked.vstring())
462 filesFromOption(self)
463 elif self.
_options.filetype ==
"DAT":
464 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
465 filesFromOption(self)
466 elif self.
_options.filetype ==
"LHE":
467 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
468 if self.
_options.filein.startswith(
"lhe:"):
470 args=self.
_options.filein.split(
':')
472 print(
'LHE input from article ',article)
473 location=
'/store/lhe/' 475 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
476 for line
in textOfFiles:
477 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
478 self.
process.source.fileNames.append(location+article+
'/'+fileName)
481 print(
'Issue to load LHE files, please check and try again.')
484 if len(self.
process.source.fileNames)==0:
485 print(
'Issue with empty filename, but can pass line check')
488 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
490 filesFromOption(self)
492 elif self.
_options.filetype ==
"DQM":
493 self.
process.source=cms.Source(
"DQMRootSource",
494 fileNames = cms.untracked.vstring())
495 filesFromOption(self)
497 elif self.
_options.filetype ==
"DQMDAQ":
499 self.
process.source=cms.Source(
"DQMStreamerReader")
503 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
506 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
510 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
515 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 517 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 520 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
521 for command
in self.
_options.inputCommands.split(
','):
523 command = command.strip()
524 if command==
'':
continue 525 self.
process.source.inputCommands.append(command)
526 if not self.
_options.dropDescendant:
527 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
530 import FWCore.PythonUtilities.LumiList
as LumiList
534 if self.
process.source
is None:
535 self.
process.source=cms.Source(
"EmptySource")
541 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
542 if self.
_options.runsAndWeightsForMC:
545 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
546 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
547 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
548 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
553 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
560 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
562 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
563 if self.
_options.runsAndWeightsForMCIntegerWeights:
566 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
567 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
568 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
569 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
575 raise Exception(
"--relval option required when using --runsAndWeightsInt")
577 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
578 total_events, events_per_job = self.
_options.relval.split(
',')
580 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
585 """ Add output module to the process """ 589 print(
"--datatier & --eventcontent options ignored")
592 outList = eval(self.
_options.outputDefinition)
593 for (id,outDefDict)
in enumerate(outList):
594 outDefDictStr=outDefDict.__str__()
595 if not isinstance(outDefDict,dict):
596 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
598 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
601 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
602 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
603 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
604 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
605 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
607 if not theModuleLabel:
608 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
609 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
610 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 612 for name
in tryNames:
613 if not hasattr(self.
process,name):
616 if not theModuleLabel:
617 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
619 defaultFileName=self.
_options.outfile_name
621 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
623 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
624 if not theFileName.endswith(
'.root'):
628 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
629 if theStreamType==
'DQMIO': theStreamType=
'DQM' 630 if theStreamType==
'ALL':
631 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
633 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
637 if theStreamType==
'ALCARECO' and not theFilterName:
638 theFilterName=
'StreamALCACombined' 641 CppType=
'PoolOutputModule' 643 CppType=
'TimeoutPoolOutputModule' 644 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 645 output = cms.OutputModule(CppType,
646 theEventContent.clone(),
647 fileName = cms.untracked.string(theFileName),
648 dataset = cms.untracked.PSet(
649 dataTier = cms.untracked.string(theTier),
650 filterName = cms.untracked.string(theFilterName))
652 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
653 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
654 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
655 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
657 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
660 if not hasattr(output,
'SelectEvents'):
661 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
663 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
666 if hasattr(self.
process,theModuleLabel):
667 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
669 setattr(self.
process,theModuleLabel,output)
670 outputModule=getattr(self.
process,theModuleLabel)
671 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
672 path=getattr(self.
process,theModuleLabel+
'_step')
675 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
676 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
677 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
678 if theExtraOutputCommands:
679 if not isinstance(theExtraOutputCommands,list):
680 raise Exception(
"extra ouput command in --option must be a list of strings")
681 if hasattr(self.
process,theStreamType+
"EventContent"):
682 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
684 outputModule.outputCommands.extend(theExtraOutputCommands)
686 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
691 streamTypes=self.
_options.eventcontent.split(
',')
692 tiers=self.
_options.datatier.split(
',')
693 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
694 raise Exception(
"number of event content arguments does not match number of datatier arguments")
700 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
701 if streamType==
'':
continue 702 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 703 if streamType==
'DQMIO': streamType=
'DQM' 704 eventContent=streamType
706 if streamType ==
"NANOEDMAOD" :
707 eventContent =
"NANOAOD" 708 elif streamType ==
"NANOEDMAODSIM" :
709 eventContent =
"NANOAODSIM" 710 theEventContent = getattr(self.
process, eventContent+
"EventContent")
712 theFileName=self.
_options.outfile_name
713 theFilterName=self.
_options.filtername
715 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
716 theFilterName=self.
_options.filtername
717 CppType=
'PoolOutputModule' 719 CppType=
'TimeoutPoolOutputModule' 720 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 721 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 722 output = cms.OutputModule(CppType,
724 fileName = cms.untracked.string(theFileName),
725 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
726 filterName = cms.untracked.string(theFilterName)
729 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
730 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
731 if hasattr(self.
process,
"filtering_step"):
732 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
734 if streamType==
'ALCARECO':
735 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
737 if "MINIAOD" in streamType:
738 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
741 outputModuleName=streamType+
'output' 742 setattr(self.
process,outputModuleName,output)
743 outputModule=getattr(self.
process,outputModuleName)
744 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
745 path=getattr(self.
process,outputModuleName+
'_step')
748 if self.
_options.outputCommands
and streamType!=
'DQM':
749 for evct
in self.
_options.outputCommands.split(
','):
750 if not evct:
continue 751 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
753 if not self.
_options.inlineEventContent:
754 tmpstreamType=streamType
755 if "NANOEDM" in tmpstreamType :
756 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
757 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
759 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
761 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
767 Add selected standard sequences to the process 771 pileupSpec=self.
_options.pileup.split(
',')[0]
774 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
775 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
776 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
780 if '.' in pileupSpec:
781 mixingDict={
'file':pileupSpec}
782 elif pileupSpec.startswith(
'file:'):
783 mixingDict={
'file':pileupSpec[5:]}
786 mixingDict=copy.copy(Mixing[pileupSpec])
787 if len(self.
_options.pileup.split(
','))>1:
788 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
791 if 'file:' in pileupSpec:
794 print(
"inlining mixing module configuration")
799 mixingDict.pop(
'file')
802 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
804 elif self.
_options.pileup_input.startswith(
"filelist:"):
807 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
809 for command
in specialization:
811 if len(mixingDict)!=0:
812 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
833 stepSpec = self.
stepMap[stepName]
834 print(
"Step:", stepName,
"Spec:",stepSpec)
835 if stepName.startswith(
're'):
837 if stepName[2:]
not in self.
_options.donotDropOnInput:
838 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
839 stepName=stepName[2:]
841 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
842 elif isinstance(stepSpec, list):
843 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
844 elif isinstance(stepSpec, tuple):
845 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
847 raise ValueError(
"Invalid step definition")
849 if self.
_options.restoreRNDSeeds!=
False:
851 if self.
_options.restoreRNDSeeds==
True:
852 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
854 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
857 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 859 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 865 def dropSecondDropStar(iec):
875 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
876 for evct
in self.
_options.inputEventContent.split(
','):
877 if evct==
'':
continue 878 theEventContent = getattr(self.
process, evct+
"EventContent")
879 if hasattr(theEventContent,
'outputCommands'):
880 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
881 if hasattr(theEventContent,
'inputCommands'):
882 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
884 dropSecondDropStar(self.
process.source.inputCommands)
886 if not self.
_options.dropDescendant:
887 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
893 """Add conditions to the process""" 894 if not self.
_options.conditions:
return 896 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
897 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
898 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
901 from Configuration.AlCa.GlobalTag
import GlobalTag
908 """Include the customise code """ 912 for c
in self.
_options.customisation_file:
913 custOpt.extend(c.split(
","))
915 for c
in self.
_options.customisation_file_unsch:
916 custOpt.extend(c.split(
","))
922 raise Exception(
"more than . in the specification:"+opt)
923 fileName=opt.split(
'.')[0]
924 if opt.count(
'.')==0: rest=
'customise' 926 rest=opt.split(
'.')[1]
927 if rest==
'py': rest=
'customise' 929 if fileName
in custMap:
930 custMap[fileName].extend(rest.split(
'+'))
932 custMap[fileName]=rest.split(
'+')
937 final_snippet=
'\n# customisation of the process.\n' 941 allFcn.extend(custMap[opt])
943 if allFcn.count(fcn)!=1:
944 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
948 packageName = f.replace(
".py",
"").
replace(
"/",
".")
949 __import__(packageName)
950 package = sys.modules[packageName]
953 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
955 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 957 for line
in file(customiseFile,
'r'): 958 if "import FWCore.ParameterSet.Config" in line:
960 final_snippet += line
962 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
963 for fcn
in custMap[f]:
964 print(
"customising the process with",fcn,
"from",f)
965 if not hasattr(package,fcn):
967 raise Exception(
"config "+f+
" has no function "+fcn)
971 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
972 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
975 final_snippet +=
'\n# End of customisation functions\n' 981 final_snippet=
'\n# Customisation from command line\n' 982 if self.
_options.customise_commands:
984 for com
in self.
_options.customise_commands.split(
'\\n'):
987 final_snippet +=
'\n'+com
998 if self.
_options.particleTable
not in defaultOptions.particleTableList:
999 print(
'Invalid particle table provided. Options are:')
1000 print(defaultOptions.particleTable)
1008 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 1041 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1080 self.
_options.beamspot=VtxSmearedDefaultKey
1085 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1086 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1089 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1098 if self.
_options.scenario==
'cosmics':
1100 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1101 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1102 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1112 if self.
_options.scenario==
'HeavyIons':
1114 self.
_options.beamspot=VtxSmearedHIDefaultKey
1119 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1121 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1124 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1136 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1140 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1144 if 'start' in self.
_options.conditions.lower():
1145 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1147 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1149 def inGeometryKeys(opt):
1150 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1151 if opt
in GeometryConf:
1152 return GeometryConf[opt]
1156 geoms=self.
_options.geometry.split(
',')
1157 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1160 if '/' in geoms[1]
or '_cff' in geoms[1]:
1163 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1165 if (geoms[0].startswith(
'DB:')):
1166 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1170 if '/' in geoms[0]
or '_cff' in geoms[0]:
1173 simGeometry=geoms[0]
1175 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1177 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1180 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1181 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1184 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1189 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1190 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1197 if self.
_options.pileup==
'default':
1198 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1199 self.
_options.pileup=MixingDefaultKey
1212 output = cms.OutputModule(
"PoolOutputModule")
1213 if stream.selectEvents.parameters_().__len__()!=0:
1214 output.SelectEvents = stream.selectEvents
1216 output.SelectEvents = cms.untracked.PSet()
1217 output.SelectEvents.SelectEvents=cms.vstring()
1218 if isinstance(stream.paths,tuple):
1219 for path
in stream.paths:
1220 output.SelectEvents.SelectEvents.append(path.label())
1222 output.SelectEvents.SelectEvents.append(stream.paths.label())
1226 if isinstance(stream.content,str):
1227 evtPset=getattr(self.process,stream.content)
1228 for p
in evtPset.parameters_():
1229 setattr(output,p,getattr(evtPset,p))
1230 if not self._options.inlineEventContent:
1231 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1233 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1235 output.outputCommands = stream.content
1238 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1240 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1241 filterName = cms.untracked.string(stream.name))
1243 if self._options.filtername:
1244 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1247 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1249 if workflow
in (
"producers,full"):
1250 if isinstance(stream.paths,tuple):
1251 for path
in stream.paths:
1252 self.schedule.
append(path)
1254 self.schedule.
append(stream.paths)
1258 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1259 self.additionalOutputs[name] = output
1260 setattr(self.process,name,output)
1262 if workflow ==
'output':
1264 filterList = output.SelectEvents.SelectEvents
1265 for i, filter
in enumerate(filterList):
1266 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1276 _dotsplit = stepSpec.split(
'.')
1277 if ( len(_dotsplit)==1 ):
1278 if '/' in _dotsplit[0]:
1279 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1282 _sequence = stepSpec
1284 elif ( len(_dotsplit)==2 ):
1285 _cff,_sequence = _dotsplit
1287 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1288 print(stepSpec,
"not recognized")
1291 return l,_sequence,_cff
1296 for i,s
in enumerate(seq.split(
'*')):
1298 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1300 p=getattr(self.
process,prefix)
1301 tmp = getattr(self.
process, s)
1302 if isinstance(tmp, cms.Task):
1313 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1316 for i,s
in enumerate(seq.split(
'+')):
1318 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1332 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1333 """ Enrich the process with alca streams """ 1338 alcaList = sequence.split(
"+")
1339 for alca
in alcaList:
1340 if (len(alca)>MAXLEN):
1341 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1344 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1348 for name
in alcaConfig.__dict__:
1349 alcastream = getattr(alcaConfig,name)
1350 shortName = name.replace(
'ALCARECOStream',
'')
1351 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1352 if shortName
in AlCaNoConcurrentLumis:
1353 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1356 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1357 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1359 if 'DQM' in alcaList:
1360 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1361 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1363 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1367 if isinstance(alcastream.paths,tuple):
1368 for path
in alcastream.paths:
1373 for i
in range(alcaList.count(shortName)):
1374 alcaList.remove(shortName)
1377 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1378 path = getattr(alcaConfig,name)
1380 alcaList.remove(
'DQM')
1382 if isinstance(alcastream,cms.Path):
1387 if len(alcaList) != 0:
1389 for name
in alcaConfig.__dict__:
1390 alcastream = getattr(alcaConfig,name)
1391 if isinstance(alcastream,cms.FilteredStream):
1392 available.append(name.replace(
'ALCARECOStream',
''))
1393 print(
"The following alcas could not be found "+
str(alcaList))
1394 print(
"available ",available)
1396 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1401 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1402 print(
"Loading lhe fragment from",loadFragment)
1403 __import__(loadFragment)
1404 self.process.
load(loadFragment)
1406 self._options.inlineObjects+=
','+stepSpec
1408 getattr(self.process,stepSpec).nEvents = self._options.number
1411 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1412 self.excludedPaths.
append(
"lhe_step")
1413 self.schedule.
append( self.process.lhe_step )
1416 """ load the fragment of generator configuration """ 1423 if not '/' in loadFragment:
1424 loadFragment=
'Configuration.Generator.'+loadFragment
1426 loadFragment=loadFragment.replace(
'/',
'.')
1428 print(
"Loading generator fragment from",loadFragment)
1429 __import__(loadFragment)
1434 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1437 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1439 generatorModule=sys.modules[loadFragment]
1440 genModules=generatorModule.__dict__
1451 import FWCore.ParameterSet.Modules
as cmstypes
1452 for name
in genModules:
1453 theObject = getattr(generatorModule,name)
1454 if isinstance(theObject, cmstypes._Module):
1456 if theObject.type_()
in noConcurrentLumiGenerators:
1457 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1460 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1461 self.
_options.inlineObjects+=
','+name
1463 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly' or stepSpec ==
'pgen_smear':
1464 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1466 elif 'generator' in genModules:
1469 """ Enrich the schedule with the rest of the generation step """ 1474 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1475 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1478 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1480 if self.
_options.scenario ==
'HeavyIons':
1481 if self.
_options.pileup==
'HiMixGEN':
1482 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1483 elif self.
_options.pileup==
'HiMixEmbGEN':
1484 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1486 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1488 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1492 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1498 """ Enrich the schedule with the summary of the filter step """ 1505 """ Enrich the schedule with the simulation step""" 1515 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1521 """ Enrich the schedule with the digitisation step""" 1525 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1527 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1528 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1530 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1531 if self.
_options.inputEventContent==
'':
1532 self.
_options.inputEventContent=
'REGEN' 1541 """ Enrich the schedule with the crossing frame writer step""" 1547 """ Enrich the schedule with the digitisation step""" 1553 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1555 elif self.
_options.pileup_input.startswith(
"filelist:"):
1558 theFiles=self.
_options.pileup_input.split(
',')
1560 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1575 """ Enrich the schedule with the L1 simulation step""" 1582 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1583 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1584 if stepSpec
in supported:
1585 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1586 if self.
_options.scenario ==
'HeavyIons':
1590 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1594 """ Enrich the schedule with the HLT simulation step""" 1596 print(
"no specification of the hlt menu has been given, should never happen")
1597 raise Exception(
'no HLT specifications provided')
1601 from Configuration.HLT.autoHLT
import autoHLT
1604 stepSpec = autoHLT[key]
1606 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1612 if self.
_options.scenario ==
'HeavyIons':
1613 optionsForHLT[
'type'] =
'HIon' 1615 optionsForHLT[
'type'] =
'GRun' 1616 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1617 if stepSpec ==
'run,fromSource':
1618 if hasattr(self.
process.source,
'firstRun'):
1619 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1620 elif hasattr(self.
process.source,
'setRunNumber'):
1621 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1623 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1625 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1630 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1636 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1639 if self.
process.schedule ==
None:
1640 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1647 if not hasattr(self.
process,
'HLTEndSequence'):
1648 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1653 seqReco,seqDigi=stepSpec.spli(
',')
1655 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1669 for filt
in allMetFilterPaths:
1673 ''' Enrich the schedule with L1 HW validation ''' 1675 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1679 ''' Enrich the schedule with L1 reconstruction ''' 1685 ''' Enrich the schedule with L1 reconstruction ''' 1691 ''' Enrich the schedule with a user defined filter sequence ''' 1693 filterConfig,filterSeq = stepSpec.split(
'.')
1694 filterConfig=self.
load(filterConfig)
1696 class PrintAllModules(
object):
1700 def enter(self,visitee):
1702 label=visitee.label()
1707 def leave(self,v):
pass 1709 expander=PrintAllModules()
1710 getattr(self.
process,filterSeq).visit( expander )
1711 self.
_options.inlineObjects+=
','+expander.inliner
1712 self.
_options.inlineObjects+=
','+filterSeq
1723 ''' Enrich the schedule with reconstruction ''' 1729 ''' Enrich the schedule with reconstruction ''' 1735 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1737 print(
"ERROR: this step is only implemented for FastSim")
1744 ''' Enrich the schedule with PAT ''' 1749 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1752 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1754 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1757 if len(self.
_options.customise_commands) > 1:
1758 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1759 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1760 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1761 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1768 ''' Enrich the schedule with PATGEN ''' 1772 raise Exception(
"PATGEN step can only run on MC")
1777 ''' Enrich the schedule with NANO ''' 1781 from PhysicsTools.NanoAOD.autoNANO
import autoNANO, expandNanoMapping
1783 _nanoCustoms = _nanoSeq.split(
'+')
if '@' in stepSpec
else [
'']
1784 _nanoSeq = _nanoSeq.split(
'+')
1788 _nanoSeq = list(sorted(set(_nanoSeq), key=_nanoSeq.index))
1789 _nanoCustoms = list(sorted(set(_nanoCustoms), key=_nanoCustoms.index))
1791 _nanoSeq = [seq
if seq!=
'' else self.
NANODefaultSeq for seq
in _nanoSeq]
1792 _nanoCustoms = [cust
if cust!=
'' else self.
NANODefaultCustom for cust
in _nanoCustoms]
1794 if len(_nanoSeq) < 1
and '@' in stepSpec:
1795 raise Exception(f
'The specified mapping: {stepSpec} generates an empty NANO sequence. Please provide a valid mappign')
1799 for custom
in _nanoCustoms:
1800 custom_path = custom
if '.' in custom
else '.'.
join([_nanoCff,custom])
1802 self.
_options.customisation_file.append(custom_path)
1804 if len(self.
_options.customise_commands) > 1:
1805 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1806 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1809 ''' Enrich the schedule with NANOGEN ''' 1811 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1814 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1816 self.
_options.customisation_file_unsch.insert(0,
'.'.
join([_nanogenCff, custom]))
1818 self.
_options.customisation_file.insert(0,
'.'.
join([_nanogenCff, custom]))
1821 ''' Enrich the schedule with skimming fragments''' 1824 stdHLTProcName =
'HLT' 1825 newHLTProcName = self.
_options.hltProcess
1826 customiseForReHLT = (newHLTProcName
or (stdHLTProcName
in self.
stepMap))
and (newHLTProcName != stdHLTProcName)
1827 if customiseForReHLT:
1828 print(
"replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1831 from Configuration.Skimming.autoSkim
import autoSkim
1832 skimlist = sequence.split(
'+')
1836 for skim
in skimConfig.__dict__:
1837 skimstream = getattr(skimConfig, skim)
1840 if isinstance(skimstream, cms.Path):
1843 elif isinstance(skimstream, cms.Sequence):
1844 if customiseForReHLT:
1847 if not isinstance(skimstream, cms.FilteredStream):
1850 shortname = skim.replace(
'SKIMStream',
'')
1851 if (sequence==
"all"):
1853 elif (shortname
in skimlist):
1858 skimstreamDQM = cms.FilteredStream(
1859 responsible = skimstream.responsible,
1860 name = skimstream.name+
'DQM',
1861 paths = skimstream.paths,
1862 selectEvents = skimstream.selectEvents,
1863 content = self.
_options.datatier+
'EventContent',
1864 dataTier = cms.untracked.string(self.
_options.datatier)
1867 for i
in range(skimlist.count(shortname)):
1868 skimlist.remove(shortname)
1870 if (skimlist.__len__()!=0
and sequence!=
"all"):
1871 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1872 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1876 ''' Enrich the schedule with a user defined sequence ''' 1882 """ Enrich the schedule with the postreco step """ 1889 print(f
"{stepSpec} in preparing validation")
1891 from Validation.Configuration.autoValidation
import autoValidation
1893 if sequence.find(
',')!=-1:
1894 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1895 valSeqName=sequence.split(
',')[1].
split(
'+')
1900 prevalSeqName=sequence.split(
'+')
1901 valSeqName=sequence.split(
'+')
1907 postfix=
'_'+sequence
1908 prevalSeqName=[
'prevalidation'+postfix]
1909 valSeqName=[
'validation'+postfix]
1910 if not hasattr(self.
process,valSeqName[0]):
1912 valSeqName=[sequence]
1924 for s
in valSeqName+prevalSeqName:
1927 for (i,s)
in enumerate(prevalSeqName):
1929 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1932 for (i,s)
in enumerate(valSeqName):
1933 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1940 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1941 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1948 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1950 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1952 for (i,s)
in enumerate(valSeqName):
1959 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1960 It will climb down within PSets, VPSets and VInputTags to find its target""" 1961 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1967 def doIt(self, pset, base):
1968 if isinstance(pset, cms._Parameterizable):
1969 for name
in pset.parameters_().
keys():
1975 value = getattr(pset, name)
1976 valueType = type(value)
1977 if valueType
in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
1978 self.
doIt(value,base+
"."+name)
1979 elif valueType
in [cms.VPSet, cms.untracked.VPSet]:
1980 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1981 elif valueType
in [cms.string, cms.untracked.string]:
1985 elif valueType
in [cms.VInputTag, cms.untracked.VInputTag]:
1986 for (i,n)
in enumerate(value):
1987 if not isinstance(n, cms.InputTag):
1994 elif valueType
in [cms.vstring, cms.untracked.vstring]:
1995 for (i,n)
in enumerate(value):
1998 elif valueType
in [cms.InputTag, cms.untracked.InputTag]:
2001 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
2003 def enter(self,visitee):
2006 label = visitee.label()
2007 except AttributeError:
2008 label =
'<Module not in a Process>' 2010 label =
'other execption' 2011 self.
doIt(visitee, label)
2013 def leave(self,visitee):
2018 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
2021 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 2024 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2030 if proc == HLTprocess:
2033 if verbosityLevel > 0:
2034 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2035 verboseVisit = (verbosityLevel > 1)
2036 getattr(self.
process,sequence).visit(
2038 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
2041 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))' 2042 % (sequence, HLTprocess, proc, verboseVisit))
2047 while '@' in repr(seqList)
and level<maxLevel:
2049 for specifiedCommand
in seqList:
2050 if specifiedCommand.startswith(
'@'):
2051 location=specifiedCommand[1:]
2052 if not location
in mapping:
2053 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
2054 mappedTo=mapping[location]
2056 mappedTo=mappedTo[index]
2057 seqList.remove(specifiedCommand)
2058 seqList.extend(mappedTo.split(
'+'))
2061 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
2069 sequenceList=_dqmSeq.split(
'+')
2070 postSequenceList=_dqmSeq.split(
'+')
2071 from DQMOffline.Configuration.autoDQM
import autoDQM
2075 if len(set(sequenceList))!=len(sequenceList):
2077 print(
"Duplicate entries for DQM:, using",sequenceList)
2079 pathName=
'dqmoffline_step' 2080 for (i,_sequence)
in enumerate(sequenceList):
2082 pathName=
'dqmoffline_%d_step'%(i)
2087 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
2090 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2095 pathName=
'dqmofflineOnPAT_step' 2096 for (i,_sequence)
in enumerate(postSequenceList):
2098 if (sequenceList[i]==postSequenceList[i]):
2101 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2103 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2107 """ Enrich the process with harvesting step """ 2108 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2114 harvestingList = sequence.split(
"+")
2115 from DQMOffline.Configuration.autoDQM
import autoDQM
2116 from Validation.Configuration.autoValidation
import autoValidation
2118 combined_mapping = copy.deepcopy( autoDQM )
2119 combined_mapping.update( autoValidation )
2120 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2122 if len(set(harvestingList))!=len(harvestingList):
2123 harvestingList=list(
OrderedSet(harvestingList))
2124 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2126 for name
in harvestingList:
2127 if not name
in harvestingConfig.__dict__:
2128 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2132 harvestingstream = getattr(harvestingConfig,name)
2133 if isinstance(harvestingstream,cms.Path):
2136 if isinstance(harvestingstream,cms.Sequence):
2137 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2148 """ Enrich the process with AlCaHarvesting step """ 2150 sequence=stepSpec.split(
".")[-1]
2153 harvestingList = sequence.split(
"+")
2157 from Configuration.AlCa.autoPCL
import autoPCL
2160 for name
in harvestingConfig.__dict__:
2161 harvestingstream = getattr(harvestingConfig,name)
2162 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2164 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2165 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2166 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2167 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2169 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2170 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2171 harvestingList.remove(name)
2173 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2176 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2177 print(
"The following harvesting could not be found : ", harvestingList)
2178 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2193 """ Add useful info for the production. """ 2194 self.
process.configurationMetadata=cms.untracked.PSet\
2195 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2196 name=cms.untracked.string(
"Applications"),
2197 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2205 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2207 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2216 from Configuration.StandardSequences.Eras
import eras
2217 for requestedEra
in self.
_options.era.split(
",") :
2218 modifierStrings.append(requestedEra)
2219 modifierImports.append(eras.pythonCfgLines[requestedEra])
2220 modifiers.append(getattr(eras,requestedEra))
2226 for c
in self.
_options.procModifiers:
2227 thingsImported.extend(c.split(
","))
2228 for pm
in thingsImported:
2229 modifierStrings.append(pm)
2230 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2231 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2237 if len(modifierStrings)>0:
2244 if len(modifiers)>0:
2252 def prepare(self, doChecking = False):
2253 """ Prepare the configuration string and add missing pieces.""" 2265 outputModuleCfgCode=
"" 2271 self.
pythonCfgCode +=
"# import of standard configurations\n" 2276 if not hasattr(self.
process,
"configurationMetadata"):
2298 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2299 tmpOut = cms.EndPath(output)
2300 setattr(self.
process,name+
'OutPath',tmpOut)
2309 for object
in self.
_options.inlineObjects.split(
','):
2312 if not hasattr(self.
process,object):
2313 print(
'cannot inline -'+object+
'- : not known')
2318 if self.
_options.pileup==
'HiMixEmbGEN':
2319 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2323 for path
in self.
process.paths:
2327 for endpath
in self.
process.endpaths:
2335 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2336 if self.
process.schedule ==
None:
2337 self.
process.schedule = cms.Schedule()
2339 self.
process.schedule.append(item)
2340 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2343 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2345 for index, item
in enumerate(self.
schedule):
2347 self.
process.schedule.insert(index, item)
2349 self.
process.schedule.append(item)
2351 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2353 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2360 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2361 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2365 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2368 overrideThreads = (self.
_options.nThreads != 1)
2369 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2370 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2372 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2380 if overrideConcurrentLumis:
2382 self.
process.options.numberOfConcurrentLuminosityBlocks = self.
_options.nConcurrentLumis
2383 if overrideConcurrentIOVs:
2385 self.
process.options.eventSetup.numberOfConcurrentIOVs = self.
_options.nConcurrentIOVs
2387 if self.
_options.accelerators
is not None:
2388 accelerators = self.
_options.accelerators.split(
',')
2390 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2391 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2392 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2393 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2394 self.
process.options.accelerators = accelerators
2399 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2400 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2401 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2405 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2413 for path
in self.
process.paths:
2423 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2432 if hasattr(self.
process,
"logErrorHarvester"):
2434 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2435 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2436 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2437 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2444 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2445 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2446 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2448 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2451 imports = cms.specialImportRegistry.getSpecialImports()
2452 if len(imports) > 0:
2464 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2467 if hasattr(self.
process.source,
"fileNames"):
2468 if len(self.
process.source.fileNames.value()):
2469 ioJson[
'primary']=self.
process.source.fileNames.value()
2470 if hasattr(self.
process.source,
"secondaryFileNames"):
2471 if len(self.
process.source.secondaryFileNames.value()):
2472 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2473 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2474 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2476 ioJson[o]=om.fileName.value()
2477 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2481 io.write(json.dumps(ioJson))
2484 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def expandNanoMapping(seqList, mapping, key)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def heapProfileOptions(self)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule