3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
11 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
17 from subprocess
import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes
as DictTypes
# ---------------------------------------------------------------------------
# Defaults for every cmsDriver option understood by ConfigBuilder.
# NOTE(review): `defaultOptions` itself (presumably an Options/namespace
# instance) is created on a line just above this chunk — confirm against the
# full file.  This span was reconstructed from an extraction-garbled source:
# only formatting was repaired, no values were changed.
# ---------------------------------------------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
# Import kept in place (mid-span in the original file) to preserve any
# import-order side effects the surrounding code may rely on.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# NOTE(review): thread/stream/lumi counts are kept as strings, matching the
# original source — presumably parsed downstream; do not coerce to int here.
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
# NOTE(review): orphaned fragment of a helper (original line 96) whose `def`
# line is not visible in this chunk; left as-is pending the full file.
theObject = getattr(process,name)
97 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
98 return "process."+name+
" = " + theObject.dumpPython()
99 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
100 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 102 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 105 import FWCore.ParameterSet.Config
as cms
108 for line
in open(fileName,
'r'): 109 if line.count(
".root")>=2:
111 entries=line.replace(
"\n",
"").
split()
112 prim.append(entries[0])
113 sec.append(entries[1])
114 elif (line.find(
".root")!=-1):
115 entry=line.replace(
"\n",
"")
119 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
121 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
123 if not hasattr(s,
"fileNames"):
124 s.fileNames=cms.untracked.vstring(prim)
126 s.fileNames.extend(prim)
128 if not hasattr(s,
"secondaryFileNames"):
129 s.secondaryFileNames=cms.untracked.vstring(sec)
131 s.secondaryFileNames.extend(sec)
132 print(
"found files: ",prim)
134 raise Exception(
"There are not files in input from the file list")
136 print(
"found parent files:",sec)
141 import FWCore.ParameterSet.Config
as cms
144 print(
"the query is",query)
147 while eC!=0
and count<3:
149 print(
'Sleeping, then retrying DAS')
151 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
153 tupleP = os.waitpid(p.pid, 0)
157 print(
"DAS succeeded after",count,
"attempts",eC)
159 print(
"DAS failed 3 times- I give up")
160 for line
in pipe.split(
'\n'):
161 if line.count(
".root")>=2:
163 entries=line.replace(
"\n",
"").
split()
164 prim.append(entries[0])
165 sec.append(entries[1])
166 elif (line.find(
".root")!=-1):
167 entry=line.replace(
"\n",
"")
170 prim = sorted(list(set(prim)))
171 sec = sorted(list(set(sec)))
173 if not hasattr(s,
"fileNames"):
174 s.fileNames=cms.untracked.vstring(prim)
176 s.fileNames.extend(prim)
178 if not hasattr(s,
"secondaryFileNames"):
179 s.secondaryFileNames=cms.untracked.vstring(sec)
181 s.secondaryFileNames.extend(sec)
182 print(
"found files: ",prim)
184 print(
"found parent files:",sec)
187 def anyOf(listOfKeys,dict,opt=None):
196 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
199 """The main building routines """ 201 def __init__(self, options, process = None, with_output = False, with_input = False ):
202 """options taken from old cmsDriver and optparse """ 204 options.outfile_name = options.dirout+options.fileout
208 if self.
_options.isData
and options.isMC:
209 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
215 if (hasattr(self.
_options,
"outputDefinition")
and \
216 self.
_options.outputDefinition !=
'' and \
217 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
218 (hasattr(self.
_options,
"datatier")
and \
221 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
227 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
230 for step
in self.
_options.step.split(
","):
231 if step==
'':
continue 232 stepParts = step.split(
":")
233 stepName = stepParts[0]
234 if stepName
not in stepList
and not stepName.startswith(
're'):
235 raise ValueError(
"Step "+stepName+
" unknown")
236 if len(stepParts)==1:
238 elif len(stepParts)==2:
240 elif len(stepParts)==3:
241 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
243 raise ValueError(
"Step definition "+step+
" invalid")
251 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
279 Function to add the igprof profile service so that you can dump in the middle 282 profileOpts = self.
_options.profile.split(
':')
284 profilerInterval = 100
285 profilerFormat =
None 286 profilerJobFormat =
None 292 startEvent = profileOpts.pop(0)
293 if not startEvent.isdigit():
294 raise Exception(
"%s is not a number" % startEvent)
295 profilerStart =
int(startEvent)
297 eventInterval = profileOpts.pop(0)
298 if not eventInterval.isdigit():
299 raise Exception(
"%s is not a number" % eventInterval)
300 profilerInterval =
int(eventInterval)
302 profilerFormat = profileOpts.pop(0)
305 if not profilerFormat:
306 profilerFormat =
"%s___%s___%%I.gz" % (
307 self.
_options.evt_type.replace(
"_cfi",
""),
313 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
314 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
315 elif not profilerJobFormat:
316 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 318 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
321 includeFile = includeFile.replace(
'/',
'.')
323 return sys.modules[includeFile]
326 """helper routine to load am memorize imports""" 329 includeFile = includeFile.replace(
'/',
'.')
332 return sys.modules[includeFile]
335 """helper routine to remember replace statements""" 337 if not command.strip().startswith(
"#"):
340 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
345 self.
process.options.Rethrow = [
'ProductNotFound']
346 self.
process.options.fileMode =
'FULLMERGE' 351 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
352 stats = cms.untracked.bool(
True),
353 enable = cms.untracked.bool(
True),
354 cacheHint = cms.untracked.string(
"lazy-download"),
355 readHint = cms.untracked.string(
"read-ahead-buffered")
364 self.
process.IgProfService = cms.Service(
"IgProfService",
365 reportFirstEvent = cms.untracked.int32(start),
366 reportEventInterval = cms.untracked.int32(interval),
367 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
368 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
372 """Here we decide how many evts will be processed""" 379 """Here the source is built. Priority: file, generator""" 382 def filesFromOption(self):
383 for entry
in self.
_options.filein.split(
','):
385 if entry.startswith(
"filelist:"):
387 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
392 if not hasattr(self.
process.source,
"secondaryFileNames"):
393 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
394 for entry
in self.
_options.secondfilein.split(
','):
396 if entry.startswith(
"filelist:"):
398 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
401 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
405 self.
process.source=cms.Source(
"PoolSource",
406 fileNames = cms.untracked.vstring(),
407 secondaryFileNames= cms.untracked.vstring())
408 filesFromOption(self)
409 elif self.
_options.filetype ==
"DAT":
410 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411 filesFromOption(self)
412 elif self.
_options.filetype ==
"LHE":
413 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
414 if self.
_options.filein.startswith(
"lhe:"):
416 args=self.
_options.filein.split(
':')
418 print(
'LHE input from article ',article)
419 location=
'/store/lhe/' 421 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
422 for line
in textOfFiles:
423 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
424 self.
process.source.fileNames.append(location+article+
'/'+fileName)
427 print(
'Issue to load LHE files, please check and try again.')
430 if len(self.
process.source.fileNames)==0:
431 print(
'Issue with empty filename, but can pass line check')
434 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
436 filesFromOption(self)
438 elif self.
_options.filetype ==
"DQM":
439 self.
process.source=cms.Source(
"DQMRootSource",
440 fileNames = cms.untracked.vstring())
441 filesFromOption(self)
443 elif self.
_options.filetype ==
"DQMDAQ":
445 self.
process.source=cms.Source(
"DQMStreamerReader")
449 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
452 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
456 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
461 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 463 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 466 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
467 for command
in self.
_options.inputCommands.split(
','):
469 command = command.strip()
470 if command==
'':
continue 471 self.
process.source.inputCommands.append(command)
472 if not self.
_options.dropDescendant:
473 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
476 import FWCore.PythonUtilities.LumiList
as LumiList
480 if self.
process.source
is None:
481 self.
process.source=cms.Source(
"EmptySource")
487 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
488 if self.
_options.runsAndWeightsForMC:
491 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
492 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
493 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
494 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
499 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
506 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
508 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
509 if self.
_options.runsAndWeightsForMCIntegerWeights:
512 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
513 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
514 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
515 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
521 raise Exception(
"--relval option required when using --runsAndWeightsInt")
523 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
524 total_events, events_per_job = self.
_options.relval.split(
',')
526 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
531 """ Add output module to the process """ 535 print(
"--datatier & --eventcontent options ignored")
538 outList = eval(self.
_options.outputDefinition)
539 for (id,outDefDict)
in enumerate(outList):
540 outDefDictStr=outDefDict.__str__()
541 if not isinstance(outDefDict,dict):
542 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
544 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
547 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
548 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
549 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
550 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
551 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
553 if not theModuleLabel:
554 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
555 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
556 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 558 for name
in tryNames:
559 if not hasattr(self.
process,name):
562 if not theModuleLabel:
563 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
565 defaultFileName=self.
_options.outfile_name
567 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
569 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
570 if not theFileName.endswith(
'.root'):
574 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
575 if theStreamType==
'DQMIO': theStreamType=
'DQM' 576 if theStreamType==
'ALL':
577 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
579 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
583 if theStreamType==
'ALCARECO' and not theFilterName:
584 theFilterName=
'StreamALCACombined' 587 CppType=
'PoolOutputModule' 589 CppType=
'TimeoutPoolOutputModule' 590 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 591 output = cms.OutputModule(CppType,
592 theEventContent.clone(),
593 fileName = cms.untracked.string(theFileName),
594 dataset = cms.untracked.PSet(
595 dataTier = cms.untracked.string(theTier),
596 filterName = cms.untracked.string(theFilterName))
598 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
599 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
600 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
601 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
603 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
606 if not hasattr(output,
'SelectEvents'):
607 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
609 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
612 if hasattr(self.
process,theModuleLabel):
613 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
615 setattr(self.
process,theModuleLabel,output)
616 outputModule=getattr(self.
process,theModuleLabel)
617 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
618 path=getattr(self.
process,theModuleLabel+
'_step')
621 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
# Reconstructed from a garbled one-liner (original line 622).  The default
# argument deliberately binds the enclosing method's `theStreamType` at
# definition time, so dumpPython() emits a reference to the process-level
# event-content list instead of inlining every output command.
def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
623 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
624 if theExtraOutputCommands:
625 if not isinstance(theExtraOutputCommands,list):
626 raise Exception(
"extra ouput command in --option must be a list of strings")
627 if hasattr(self.
process,theStreamType+
"EventContent"):
628 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
630 outputModule.outputCommands.extend(theExtraOutputCommands)
632 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
637 streamTypes=self.
_options.eventcontent.split(
',')
638 tiers=self.
_options.datatier.split(
',')
639 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
640 raise Exception(
"number of event content arguments does not match number of datatier arguments")
646 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
647 if streamType==
'':
continue 648 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 649 if streamType==
'DQMIO': streamType=
'DQM' 650 eventContent=streamType
652 if streamType ==
"NANOEDMAOD" :
653 eventContent =
"NANOAOD" 654 elif streamType ==
"NANOEDMAODSIM" :
655 eventContent =
"NANOAODSIM" 656 theEventContent = getattr(self.
process, eventContent+
"EventContent")
658 theFileName=self.
_options.outfile_name
659 theFilterName=self.
_options.filtername
661 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
662 theFilterName=self.
_options.filtername
663 CppType=
'PoolOutputModule' 665 CppType=
'TimeoutPoolOutputModule' 666 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 667 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 668 output = cms.OutputModule(CppType,
670 fileName = cms.untracked.string(theFileName),
671 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
672 filterName = cms.untracked.string(theFilterName)
675 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
676 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
677 if hasattr(self.
process,
"filtering_step"):
678 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
680 if streamType==
'ALCARECO':
681 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
683 if "MINIAOD" in streamType:
684 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
687 outputModuleName=streamType+
'output' 688 setattr(self.
process,outputModuleName,output)
689 outputModule=getattr(self.
process,outputModuleName)
690 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
691 path=getattr(self.
process,outputModuleName+
'_step')
694 if self.
_options.outputCommands
and streamType!=
'DQM':
695 for evct
in self.
_options.outputCommands.split(
','):
696 if not evct:
continue 697 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
699 if not self.
_options.inlineEventContent:
700 tmpstreamType=streamType
701 if "NANOEDM" in tmpstreamType :
702 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
703 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
705 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
707 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
713 Add selected standard sequences to the process 717 pileupSpec=self.
_options.pileup.split(
',')[0]
720 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
721 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
722 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
726 if '.' in pileupSpec:
727 mixingDict={
'file':pileupSpec}
728 elif pileupSpec.startswith(
'file:'):
729 mixingDict={
'file':pileupSpec[5:]}
732 mixingDict=copy.copy(Mixing[pileupSpec])
733 if len(self.
_options.pileup.split(
','))>1:
734 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
737 if 'file:' in pileupSpec:
740 print(
"inlining mixing module configuration")
745 mixingDict.pop(
'file')
748 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
750 elif self.
_options.pileup_input.startswith(
"filelist:"):
753 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
755 for command
in specialization:
757 if len(mixingDict)!=0:
758 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
779 stepSpec = self.
stepMap[stepName]
780 print(
"Step:", stepName,
"Spec:",stepSpec)
781 if stepName.startswith(
're'):
783 if stepName[2:]
not in self.
_options.donotDropOnInput:
784 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
785 stepName=stepName[2:]
787 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
788 elif isinstance(stepSpec, list):
789 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
790 elif isinstance(stepSpec, tuple):
791 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
793 raise ValueError(
"Invalid step definition")
795 if self.
_options.restoreRNDSeeds!=
False:
797 if self.
_options.restoreRNDSeeds==
True:
798 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
800 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
803 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 805 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 811 def dropSecondDropStar(iec):
821 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
822 for evct
in self.
_options.inputEventContent.split(
','):
823 if evct==
'':
continue 824 theEventContent = getattr(self.
process, evct+
"EventContent")
825 if hasattr(theEventContent,
'outputCommands'):
826 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
827 if hasattr(theEventContent,
'inputCommands'):
828 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
830 dropSecondDropStar(self.
process.source.inputCommands)
832 if not self.
_options.dropDescendant:
833 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
839 """Add conditions to the process""" 840 if not self.
_options.conditions:
return 842 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
843 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
844 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
847 from Configuration.AlCa.GlobalTag
import GlobalTag
854 """Include the customise code """ 858 for c
in self.
_options.customisation_file:
859 custOpt.extend(c.split(
","))
861 for c
in self.
_options.customisation_file_unsch:
862 custOpt.extend(c.split(
","))
868 raise Exception(
"more than . in the specification:"+opt)
869 fileName=opt.split(
'.')[0]
870 if opt.count(
'.')==0: rest=
'customise' 872 rest=opt.split(
'.')[1]
873 if rest==
'py': rest=
'customise' 875 if fileName
in custMap:
876 custMap[fileName].extend(rest.split(
'+'))
878 custMap[fileName]=rest.split(
'+')
883 final_snippet=
'\n# customisation of the process.\n' 887 allFcn.extend(custMap[opt])
889 if allFcn.count(fcn)!=1:
890 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
894 packageName = f.replace(
".py",
"").
replace(
"/",
".")
895 __import__(packageName)
896 package = sys.modules[packageName]
899 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
901 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 903 for line
in file(customiseFile,
'r'): 904 if "import FWCore.ParameterSet.Config" in line:
906 final_snippet += line
908 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
909 for fcn
in custMap[f]:
910 print(
"customising the process with",fcn,
"from",f)
911 if not hasattr(package,fcn):
913 raise Exception(
"config "+f+
" has no function "+fcn)
917 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
918 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
921 final_snippet +=
'\n# End of customisation functions\n' 927 final_snippet=
'\n# Customisation from command line\n' 928 if self.
_options.customise_commands:
930 for com
in self.
_options.customise_commands.split(
'\\n'):
933 final_snippet +=
'\n'+com
944 if self.
_options.particleTable
not in defaultOptions.particleTableList:
945 print(
'Invalid particle table provided. Options are:')
946 print(defaultOptions.particleTable)
954 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 987 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1025 self.
_options.beamspot=VtxSmearedDefaultKey
1030 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1031 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1034 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1043 if self.
_options.scenario==
'cosmics':
1045 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1046 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1047 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1057 if self.
_options.scenario==
'HeavyIons':
1059 self.
_options.beamspot=VtxSmearedHIDefaultKey
1064 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1066 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1069 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1081 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1085 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1089 if 'start' in self.
_options.conditions.lower():
1090 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1092 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1094 def inGeometryKeys(opt):
1095 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1096 if opt
in GeometryConf:
1097 return GeometryConf[opt]
1101 geoms=self.
_options.geometry.split(
',')
1102 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1105 if '/' in geoms[1]
or '_cff' in geoms[1]:
1108 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1110 if (geoms[0].startswith(
'DB:')):
1111 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1115 if '/' in geoms[0]
or '_cff' in geoms[0]:
1118 simGeometry=geoms[0]
1120 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1122 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1125 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1126 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1129 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1134 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1135 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1142 if self.
_options.pileup==
'default':
1143 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1144 self.
_options.pileup=MixingDefaultKey
1157 output = cms.OutputModule(
"PoolOutputModule")
1158 if stream.selectEvents.parameters_().__len__()!=0:
1159 output.SelectEvents = stream.selectEvents
1161 output.SelectEvents = cms.untracked.PSet()
1162 output.SelectEvents.SelectEvents=cms.vstring()
1163 if isinstance(stream.paths,tuple):
1164 for path
in stream.paths:
1165 output.SelectEvents.SelectEvents.append(path.label())
1167 output.SelectEvents.SelectEvents.append(stream.paths.label())
1171 if isinstance(stream.content,str):
1172 evtPset=getattr(self.process,stream.content)
1173 for p
in evtPset.parameters_():
1174 setattr(output,p,getattr(evtPset,p))
1175 if not self._options.inlineEventContent:
1176 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1178 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1180 output.outputCommands = stream.content
1183 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1185 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1186 filterName = cms.untracked.string(stream.name))
1188 if self._options.filtername:
1189 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1192 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1194 if workflow
in (
"producers,full"):
1195 if isinstance(stream.paths,tuple):
1196 for path
in stream.paths:
1197 self.schedule.
append(path)
1199 self.schedule.
append(stream.paths)
1203 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1204 self.additionalOutputs[name] = output
1205 setattr(self.process,name,output)
1207 if workflow ==
'output':
1209 filterList = output.SelectEvents.SelectEvents
1210 for i, filter
in enumerate(filterList):
1211 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1221 if ( len(sequence.split(
'.'))==1 ):
1223 elif ( len(sequence.split(
'.'))==2 ):
1225 sequence=sequence.split(
'.')[1]
1227 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1228 print(sequence,
"not recognized")
1235 for i,s
in enumerate(seq.split(
'*')):
1237 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1239 p=getattr(self.
process,prefix)
1240 tmp = getattr(self.
process, s)
1241 if isinstance(tmp, cms.Task):
1252 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1255 for i,s
in enumerate(seq.split(
'+')):
1257 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1271 def prepare_ALCA(self, sequence = None, workflow = 'full'):
1272 """ Enrich the process with alca streams """ 1274 sequence = sequence.split(
'.')[-1]
1277 alcaList = sequence.split(
"+")
1279 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1283 for name
in alcaConfig.__dict__:
1284 alcastream = getattr(alcaConfig,name)
1285 shortName = name.replace(
'ALCARECOStream',
'')
1286 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1287 if shortName
in AlCaNoConcurrentLumis:
1288 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1289 self.
_options.nConcurrentLumis =
"1" 1290 self.
_options.nConcurrentIOVs =
"1" 1291 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1292 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1294 if 'DQM' in alcaList:
1295 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1296 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1298 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1302 if isinstance(alcastream.paths,tuple):
1303 for path
in alcastream.paths:
1308 for i
in range(alcaList.count(shortName)):
1309 alcaList.remove(shortName)
1312 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1313 path = getattr(alcaConfig,name)
1315 alcaList.remove(
'DQM')
1317 if isinstance(alcastream,cms.Path):
1322 if len(alcaList) != 0:
1324 for name
in alcaConfig.__dict__:
1325 alcastream = getattr(alcaConfig,name)
1326 if isinstance(alcastream,cms.FilteredStream):
1327 available.append(name.replace(
'ALCARECOStream',
''))
1328 print(
"The following alcas could not be found "+
str(alcaList))
1329 print(
"available ",available)
1331 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1336 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1337 print(
"Loading lhe fragment from",loadFragment)
1338 __import__(loadFragment)
1339 self.process.
load(loadFragment)
1341 self._options.inlineObjets+=
','+sequence
1343 getattr(self.process,sequence).nEvents =
int(self._options.number)
1346 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1347 self.excludedPaths.
append(
"lhe_step")
1348 self.schedule.
append( self.process.lhe_step )
1351 """ load the fragment of generator configuration """ 1358 if not '/' in loadFragment:
1359 loadFragment=
'Configuration.Generator.'+loadFragment
1361 loadFragment=loadFragment.replace(
'/',
'.')
1363 print(
"Loading generator fragment from",loadFragment)
1364 __import__(loadFragment)
1369 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1372 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1374 generatorModule=sys.modules[loadFragment]
1375 genModules=generatorModule.__dict__
1386 import FWCore.ParameterSet.Modules
as cmstypes
1387 for name
in genModules:
1388 theObject = getattr(generatorModule,name)
1389 if isinstance(theObject, cmstypes._Module):
1391 if theObject.type_()
in noConcurrentLumiGenerators:
1392 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1393 self.
_options.nConcurrentLumis =
"1" 1394 self.
_options.nConcurrentIOVs =
"1" 1395 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1396 self.
_options.inlineObjets+=
','+name
1398 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly' or stepSpec ==
'pgen_smear':
1399 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1401 elif 'generator' in genModules:
1404 """ Enrich the schedule with the rest of the generation step """ 1406 genSeqName=sequence.split(
'.')[-1]
1410 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1411 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1414 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1416 if self.
_options.scenario ==
'HeavyIons':
1417 if self.
_options.pileup==
'HiMixGEN':
1418 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1419 elif self.
_options.pileup==
'HiMixEmbGEN':
1420 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1422 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1424 self.
process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1428 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1434 """ Enrich the schedule with the summary of the filter step """ 1441 """ Enrich the schedule with the simulation step""" 1451 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1457 """ Enrich the schedule with the digitisation step""" 1461 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1463 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1464 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1466 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1467 if self.
_options.inputEventContent==
'':
1468 self.
_options.inputEventContent=
'REGEN' 1477 """ Enrich the schedule with the crossing frame writer step""" 1483 """ Enrich the schedule with the digitisation step""" 1489 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1491 elif self.
_options.pileup_input.startswith(
"filelist:"):
1494 theFiles=self.
_options.pileup_input.split(
',')
1496 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1511 """ Enrich the schedule with the L1 simulation step""" 1518 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1519 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1520 if sequence
in supported:
1521 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1522 if self.
_options.scenario ==
'HeavyIons':
1526 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1530 """ Enrich the schedule with the HLT simulation step""" 1532 print(
"no specification of the hlt menu has been given, should never happen")
1533 raise Exception(
'no HLT sequence provided')
1537 from Configuration.HLT.autoHLT
import autoHLT
1540 sequence = autoHLT[key]
1542 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1548 if self.
_options.scenario ==
'HeavyIons':
1549 optionsForHLT[
'type'] =
'HIon' 1551 optionsForHLT[
'type'] =
'GRun' 1552 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1553 if sequence ==
'run,fromSource':
1554 if hasattr(self.
process.source,
'firstRun'):
1555 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1556 elif hasattr(self.
process.source,
'setRunNumber'):
1557 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1559 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1561 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1566 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1572 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1575 if self.
process.schedule ==
None:
1576 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1583 if not hasattr(self.
process,
'HLTEndSequence'):
1584 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1589 seqReco=sequence.split(
',')[1]
1590 seqDigi=sequence.split(
',')[0]
1592 print(
"RAW2RECO requires two specifications",sequence,
"insufficient")
1608 for filt
in allMetFilterPaths:
1612 ''' Enrich the schedule with L1 HW validation ''' 1615 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1619 ''' Enrich the schedule with L1 reconstruction ''' 1625 ''' Enrich the schedule with L1 reconstruction ''' 1631 ''' Enrich the schedule with a user defined filter sequence ''' 1633 filterConfig=self.
load(sequence.split(
'.')[0])
1634 filterSeq=sequence.split(
'.')[-1]
1636 class PrintAllModules(
object):
1640 def enter(self,visitee):
1642 label=visitee.label()
1647 def leave(self,v):
pass 1649 expander=PrintAllModules()
1650 getattr(self.
process,filterSeq).visit( expander )
1651 self.
_options.inlineObjets+=
','+expander.inliner
1652 self.
_options.inlineObjets+=
','+filterSeq
1663 ''' Enrich the schedule with reconstruction ''' 1669 ''' Enrich the schedule with reconstruction ''' 1675 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1677 print(
"ERROR: this step is only implemented for FastSim")
1680 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1684 ''' Enrich the schedule with PAT ''' 1689 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1692 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1694 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1697 if len(self.
_options.customise_commands) > 1:
1698 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1699 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1700 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1701 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1708 ''' Enrich the schedule with PATGEN ''' 1712 raise Exception(
"PATGEN step can only run on MC")
1716 ''' Enrich the schedule with NANO ''' 1719 custom =
"nanoAOD_customizeData" if self.
_options.isData
else "nanoAOD_customizeMC" 1720 self.
_options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1722 if len(self.
_options.customise_commands) > 1:
1723 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1724 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1727 ''' Enrich the schedule with NANOGEN ''' 1729 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1732 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1739 ''' Enrich the schedule with skimming fragments''' 1741 sequence = sequence.split(
'.')[-1]
1743 skimlist=sequence.split(
'+')
1745 from Configuration.Skimming.autoSkim
import autoSkim
1749 for skim
in skimConfig.__dict__:
1750 skimstream = getattr(skimConfig,skim)
1751 if isinstance(skimstream,cms.Path):
1754 if (
not isinstance(skimstream,cms.FilteredStream)):
1756 shortname = skim.replace(
'SKIMStream',
'')
1757 if (sequence==
"all"):
1759 elif (shortname
in skimlist):
1764 skimstreamDQM = cms.FilteredStream(
1765 responsible = skimstream.responsible,
1766 name = skimstream.name+
'DQM',
1767 paths = skimstream.paths,
1768 selectEvents = skimstream.selectEvents,
1769 content = self.
_options.datatier+
'EventContent',
1770 dataTier = cms.untracked.string(self.
_options.datatier)
1773 for i
in range(skimlist.count(shortname)):
1774 skimlist.remove(shortname)
1778 if (skimlist.__len__()!=0
and sequence!=
"all"):
1779 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1780 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1783 ''' Enrich the schedule with a user defined sequence ''' 1789 """ Enrich the schedule with the postreco step """ 1796 print(sequence,
"in preparing validation")
1798 from Validation.Configuration.autoValidation
import autoValidation
1800 sequence=sequence.split(
'.')[-1]
1801 if sequence.find(
',')!=-1:
1802 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1803 valSeqName=sequence.split(
',')[1].
split(
'+')
1808 prevalSeqName=sequence.split(
'+')
1809 valSeqName=sequence.split(
'+')
1815 postfix=
'_'+sequence
1816 prevalSeqName=[
'prevalidation'+postfix]
1817 valSeqName=[
'validation'+postfix]
1818 if not hasattr(self.
process,valSeqName[0]):
1820 valSeqName=[sequence]
1832 for s
in valSeqName+prevalSeqName:
1835 for (i,s)
in enumerate(prevalSeqName):
1837 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1840 for (i,s)
in enumerate(valSeqName):
1841 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1848 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1849 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1856 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1858 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1860 for (i,s)
in enumerate(valSeqName):
1867 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1868 It will climb down within PSets, VPSets and VInputTags to find its target""" 1869 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1875 def doIt(self,pset,base):
1876 if isinstance(pset, cms._Parameterizable):
1877 for name
in pset.parameters_().
keys():
1883 value = getattr(pset,name)
1884 type = value.pythonTypeName()
1885 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1886 self.
doIt(value,base+
"."+name)
1887 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1888 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1889 elif type
in (
'cms.string',
'cms.untracked.string'):
1893 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1894 for (i,n)
in enumerate(value):
1895 if not isinstance(n, cms.InputTag):
1902 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1903 for (i,n)
in enumerate(value):
1906 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1909 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1911 def enter(self,visitee):
1914 label = visitee.label()
1915 except AttributeError:
1916 label =
'<Module not in a Process>' 1918 label =
'other execption' 1919 self.
doIt(visitee, label)
1921 def leave(self,visitee):
1926 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1929 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1932 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1940 if proc==HLTprocess:
return 1942 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1944 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1946 self.
additionalCommands.
append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1952 while '@' in repr(seqList)
and level<maxLevel:
1954 for specifiedCommand
in seqList:
1955 if specifiedCommand.startswith(
'@'):
1956 location=specifiedCommand[1:]
1957 if not location
in mapping:
1958 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1959 mappedTo=mapping[location]
1961 mappedTo=mappedTo[index]
1962 seqList.remove(specifiedCommand)
1963 seqList.extend(mappedTo.split(
'+'))
1966 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1974 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1975 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1976 from DQMOffline.Configuration.autoDQM
import autoDQM
1980 if len(set(sequenceList))!=len(sequenceList):
1981 sequenceList=list(set(sequenceList))
1982 print(
"Duplicate entries for DQM:, using",sequenceList)
1984 pathName=
'dqmoffline_step' 1985 for (i,sequence)
in enumerate(sequenceList):
1987 pathName=
'dqmoffline_%d_step'%(i)
1992 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,sequence ) ) )
1995 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2000 pathName=
'dqmofflineOnPAT_step' 2001 for (i,sequence)
in enumerate(postSequenceList):
2003 if (sequenceList[i]==postSequenceList[i]):
2006 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2008 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, sequence ) ) )
2012 """ Enrich the process with harvesting step """ 2013 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2017 sequence = sequence.split(
'.')[-1]
2020 harvestingList = sequence.split(
"+")
2021 from DQMOffline.Configuration.autoDQM
import autoDQM
2022 from Validation.Configuration.autoValidation
import autoValidation
2024 combined_mapping = copy.deepcopy( autoDQM )
2025 combined_mapping.update( autoValidation )
2026 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2028 if len(set(harvestingList))!=len(harvestingList):
2029 harvestingList=list(set(harvestingList))
2030 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2032 for name
in harvestingList:
2033 if not name
in harvestingConfig.__dict__:
2034 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2038 harvestingstream = getattr(harvestingConfig,name)
2039 if isinstance(harvestingstream,cms.Path):
2042 if isinstance(harvestingstream,cms.Sequence):
2043 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2050 """ Enrich the process with AlCaHarvesting step """ 2052 sequence=sequence.split(
".")[-1]
2055 harvestingList = sequence.split(
"+")
2059 from Configuration.AlCa.autoPCL
import autoPCL
2062 for name
in harvestingConfig.__dict__:
2063 harvestingstream = getattr(harvestingConfig,name)
2064 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2066 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2067 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2068 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2069 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2071 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2072 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2073 harvestingList.remove(name)
2075 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2078 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2079 print(
"The following harvesting could not be found : ", harvestingList)
2080 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2095 """ Add useful info for the production. """ 2096 self.
process.configurationMetadata=cms.untracked.PSet\
2097 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2098 name=cms.untracked.string(
"Applications"),
2099 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2107 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2109 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2118 from Configuration.StandardSequences.Eras
import eras
2119 for requestedEra
in self.
_options.era.split(
",") :
2120 modifierStrings.append(requestedEra)
2121 modifierImports.append(eras.pythonCfgLines[requestedEra])
2122 modifiers.append(getattr(eras,requestedEra))
2128 for c
in self.
_options.procModifiers:
2129 thingsImported.extend(c.split(
","))
2130 for pm
in thingsImported:
2131 modifierStrings.append(pm)
2132 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2133 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2139 if len(modifierStrings)>0:
2146 if len(modifiers)>0:
2154 def prepare(self, doChecking = False):
2155 """ Prepare the configuration string and add missing pieces.""" 2167 outputModuleCfgCode=
"" 2173 self.
pythonCfgCode +=
"# import of standard configurations\n" 2178 if not hasattr(self.
process,
"configurationMetadata"):
2200 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2201 tmpOut = cms.EndPath(output)
2202 setattr(self.
process,name+
'OutPath',tmpOut)
2211 for object
in self.
_options.inlineObjets.split(
','):
2214 if not hasattr(self.
process,object):
2215 print(
'cannot inline -'+object+
'- : not known')
2220 if self.
_options.pileup==
'HiMixEmbGEN':
2221 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2225 for path
in self.
process.paths:
2229 for endpath
in self.
process.endpaths:
2237 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2238 if self.
process.schedule ==
None:
2239 self.
process.schedule = cms.Schedule()
2241 self.
process.schedule.append(item)
2242 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2245 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2247 for index, item
in enumerate(self.
schedule):
2249 self.
process.schedule.insert(index, item)
2251 self.
process.schedule.append(item)
2253 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2255 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2262 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2263 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2267 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2270 overrideThreads = (self.
_options.nThreads !=
"1")
2271 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2272 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2274 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2282 if overrideConcurrentLumis:
2283 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks = "+self.
_options.nConcurrentLumis+
"\n" 2284 self.
process.options.numberOfConcurrentLuminosityBlocks =
int(self.
_options.nConcurrentLumis)
2285 if overrideConcurrentIOVs:
2286 self.
pythonCfgCode +=
"process.options.eventSetup.numberOfConcurrentIOVs = "+self.
_options.nConcurrentIOVs+
"\n" 2287 self.
process.options.eventSetup.numberOfConcurrentIOVs =
int(self.
_options.nConcurrentIOVs)
2289 if self.
_options.accelerators
is not None:
2290 accelerators = self.
_options.accelerators.split(
',')
2292 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2293 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2294 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2295 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2296 self.
process.options.accelerators = accelerators
2301 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2302 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2303 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2307 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2315 for path
in self.
process.paths:
2325 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2334 if hasattr(self.
process,
"logErrorHarvester"):
2336 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2337 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2338 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2339 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2346 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2347 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2348 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2350 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2353 imports = cms.specialImportRegistry.getSpecialImports()
2354 if len(imports) > 0:
2366 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2369 if hasattr(self.
process.source,
"fileNames"):
2370 if len(self.
process.source.fileNames.value()):
2371 ioJson[
'primary']=self.
process.source.fileNames.value()
2372 if hasattr(self.
process.source,
"secondaryFileNames"):
2373 if len(self.
process.source.secondaryFileNames.value()):
2374 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2375 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2376 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2378 ioJson[o]=om.fileName.value()
2379 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2383 io.write(json.dumps(ioJson))
2386 def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def split(sequence, size)
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_NANOGEN(self, sequence="nanoAOD")
def prepare_L1(self, sequence=None)
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule