3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
11 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
17 from subprocess
import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes
as DictTypes
# Default values for every cmsDriver/ConfigBuilder option.  These are the
# fall-backs used when the corresponding command-line flag is not given; the
# option parser overwrites them on the `defaultOptions` object, which is
# created earlier in the file (not shown in this chunk).

# --- data / MC flavour ------------------------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
# --- pile-up ----------------------------------------------------------------
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
# --- geometry / magnetic field / conditions ---------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
# --- event counts and naming ------------------------------------------------
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
# --- input sources ----------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
# --- customisation hooks ----------------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
# --- particle table ---------------------------------------------------------
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']
# --- file handling / output -------------------------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
# Vertex-smearing default keys come from the standard sequences.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
# --- run/weight assignment for MC -------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# --- threading / concurrency (note: kept as strings, not ints) --------------
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
None 96 theObject = getattr(process,name)
97 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
98 return "process."+name+
" = " + theObject.dumpPython()
99 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
100 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 102 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 105 import FWCore.ParameterSet.Config
as cms
108 for line
in open(fileName,
'r'): 109 if line.count(
".root")>=2:
111 entries=line.replace(
"\n",
"").
split()
112 prim.append(entries[0])
113 sec.append(entries[1])
114 elif (line.find(
".root")!=-1):
115 entry=line.replace(
"\n",
"")
119 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
121 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
123 if not hasattr(s,
"fileNames"):
124 s.fileNames=cms.untracked.vstring(prim)
126 s.fileNames.extend(prim)
128 if not hasattr(s,
"secondaryFileNames"):
129 s.secondaryFileNames=cms.untracked.vstring(sec)
131 s.secondaryFileNames.extend(sec)
132 print(
"found files: ",prim)
134 raise Exception(
"There are not files in input from the file list")
136 print(
"found parent files:",sec)
141 import FWCore.ParameterSet.Config
as cms
144 print(
"the query is",query)
147 while eC!=0
and count<3:
149 print(
'Sleeping, then retrying DAS')
151 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
153 tupleP = os.waitpid(p.pid, 0)
157 print(
"DAS succeeded after",count,
"attempts",eC)
159 print(
"DAS failed 3 times- I give up")
160 for line
in pipe.split(
'\n'):
161 if line.count(
".root")>=2:
163 entries=line.replace(
"\n",
"").
split()
164 prim.append(entries[0])
165 sec.append(entries[1])
166 elif (line.find(
".root")!=-1):
167 entry=line.replace(
"\n",
"")
170 prim = sorted(list(set(prim)))
171 sec = sorted(list(set(sec)))
173 if not hasattr(s,
"fileNames"):
174 s.fileNames=cms.untracked.vstring(prim)
176 s.fileNames.extend(prim)
178 if not hasattr(s,
"secondaryFileNames"):
179 s.secondaryFileNames=cms.untracked.vstring(sec)
181 s.secondaryFileNames.extend(sec)
182 print(
"found files: ",prim)
184 print(
"found parent files:",sec)
def anyOf(listOfKeys,dict,opt=None):
    """Pop and return the value of the first key from *listOfKeys* present in *dict*.

    The matched key is removed from *dict* so that the caller can afterwards
    detect unused/unknown keys left in an --output specification.  If none of
    the keys is present, *opt* is returned when it was supplied; otherwise an
    Exception is raised, because the keys are mandatory entries of the
    --output option dictionaries.

    NOTE(review): the parameter name ``dict`` shadows the builtin; it is kept
    unchanged to preserve the call interface.
    """
    for key in listOfKeys:
        if key in dict:
            # consume the entry so leftover keys can be reported by the caller
            value = dict[key]
            dict.pop(key)
            return value
    # no alias of the key found: fall back to the explicit default, if any
    if opt is not None:
        return opt
    raise Exception(
        "any of "+
        ','.
        join(listOfKeys)+
        " are mandatory entries of --output options")
199 """The main building routines """ 201 def __init__(self, options, process = None, with_output = False, with_input = False ):
202 """options taken from old cmsDriver and optparse """ 204 options.outfile_name = options.dirout+options.fileout
208 if self.
_options.isData
and options.isMC:
209 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
215 if (hasattr(self.
_options,
"outputDefinition")
and \
216 self.
_options.outputDefinition !=
'' and \
217 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
218 (hasattr(self.
_options,
"datatier")
and \
221 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
227 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
230 for step
in self.
_options.step.split(
","):
231 if step==
'':
continue 232 stepParts = step.split(
":")
233 stepName = stepParts[0]
234 if stepName
not in stepList
and not stepName.startswith(
're'):
235 raise ValueError(
"Step "+stepName+
" unknown")
236 if len(stepParts)==1:
238 elif len(stepParts)==2:
240 elif len(stepParts)==3:
241 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
243 raise ValueError(
"Step definition "+step+
" invalid")
251 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
279 Function to add the igprof profile service so that you can dump in the middle 282 profileOpts = self.
_options.profile.split(
':')
284 profilerInterval = 100
285 profilerFormat =
None 286 profilerJobFormat =
None 292 startEvent = profileOpts.pop(0)
293 if not startEvent.isdigit():
294 raise Exception(
"%s is not a number" % startEvent)
295 profilerStart =
int(startEvent)
297 eventInterval = profileOpts.pop(0)
298 if not eventInterval.isdigit():
299 raise Exception(
"%s is not a number" % eventInterval)
300 profilerInterval =
int(eventInterval)
302 profilerFormat = profileOpts.pop(0)
305 if not profilerFormat:
306 profilerFormat =
"%s___%s___%%I.gz" % (
307 self.
_options.evt_type.replace(
"_cfi",
""),
313 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
314 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
315 elif not profilerJobFormat:
316 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 318 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
321 includeFile = includeFile.replace(
'/',
'.')
323 return sys.modules[includeFile]
326 """helper routine to load am memorize imports""" 329 includeFile = includeFile.replace(
'/',
'.')
332 return sys.modules[includeFile]
335 """helper routine to remember replace statements""" 337 if not command.strip().startswith(
"#"):
340 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
345 self.
process.options.Rethrow = [
'ProductNotFound']
346 self.
process.options.fileMode =
'FULLMERGE' 351 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
352 stats = cms.untracked.bool(
True),
353 enable = cms.untracked.bool(
True),
354 cacheHint = cms.untracked.string(
"lazy-download"),
355 readHint = cms.untracked.string(
"read-ahead-buffered")
364 self.
process.IgProfService = cms.Service(
"IgProfService",
365 reportFirstEvent = cms.untracked.int32(start),
366 reportEventInterval = cms.untracked.int32(interval),
367 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
368 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
372 """Here we decide how many evts will be processed""" 379 """Here the source is built. Priority: file, generator""" 382 def filesFromOption(self):
383 for entry
in self.
_options.filein.split(
','):
385 if entry.startswith(
"filelist:"):
387 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
392 if not hasattr(self.
process.source,
"secondaryFileNames"):
393 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
394 for entry
in self.
_options.secondfilein.split(
','):
396 if entry.startswith(
"filelist:"):
398 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
401 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
405 self.
process.source=cms.Source(
"PoolSource",
406 fileNames = cms.untracked.vstring(),
407 secondaryFileNames= cms.untracked.vstring())
408 filesFromOption(self)
409 elif self.
_options.filetype ==
"DAT":
410 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411 filesFromOption(self)
412 elif self.
_options.filetype ==
"LHE":
413 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
414 if self.
_options.filein.startswith(
"lhe:"):
416 args=self.
_options.filein.split(
':')
418 print(
'LHE input from article ',article)
419 location=
'/store/lhe/' 421 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
422 for line
in textOfFiles:
423 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
424 self.
process.source.fileNames.append(location+article+
'/'+fileName)
427 print(
'Issue to load LHE files, please check and try again.')
430 if len(self.
process.source.fileNames)==0:
431 print(
'Issue with empty filename, but can pass line check')
434 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
436 filesFromOption(self)
438 elif self.
_options.filetype ==
"DQM":
439 self.
process.source=cms.Source(
"DQMRootSource",
440 fileNames = cms.untracked.vstring())
441 filesFromOption(self)
443 elif self.
_options.filetype ==
"DQMDAQ":
445 self.
process.source=cms.Source(
"DQMStreamerReader")
449 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
452 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
456 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
461 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 463 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 466 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
467 for command
in self.
_options.inputCommands.split(
','):
469 command = command.strip()
470 if command==
'':
continue 471 self.
process.source.inputCommands.append(command)
472 if not self.
_options.dropDescendant:
473 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
476 import FWCore.PythonUtilities.LumiList
as LumiList
480 if self.
process.source
is None:
481 self.
process.source=cms.Source(
"EmptySource")
487 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
488 if self.
_options.runsAndWeightsForMC:
491 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
492 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
493 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
494 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
499 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
506 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
508 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
509 if self.
_options.runsAndWeightsForMCIntegerWeights:
512 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
513 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
514 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
515 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
521 raise Exception(
"--relval option required when using --runsAndWeightsInt")
523 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
524 total_events, events_per_job = self.
_options.relval.split(
',')
526 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
531 """ Add output module to the process """ 535 print(
"--datatier & --eventcontent options ignored")
538 outList = eval(self.
_options.outputDefinition)
539 for (id,outDefDict)
in enumerate(outList):
540 outDefDictStr=outDefDict.__str__()
541 if not isinstance(outDefDict,dict):
542 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
544 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
547 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
548 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
549 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
550 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
551 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
553 if not theModuleLabel:
554 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
555 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
556 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 558 for name
in tryNames:
559 if not hasattr(self.
process,name):
562 if not theModuleLabel:
563 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
565 defaultFileName=self.
_options.outfile_name
567 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
569 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
570 if not theFileName.endswith(
'.root'):
574 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
575 if theStreamType==
'DQMIO': theStreamType=
'DQM' 576 if theStreamType==
'ALL':
577 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
579 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
583 if theStreamType==
'ALCARECO' and not theFilterName:
584 theFilterName=
'StreamALCACombined' 587 CppType=
'PoolOutputModule' 589 CppType=
'TimeoutPoolOutputModule' 590 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 591 output = cms.OutputModule(CppType,
592 theEventContent.clone(),
593 fileName = cms.untracked.string(theFileName),
594 dataset = cms.untracked.PSet(
595 dataTier = cms.untracked.string(theTier),
596 filterName = cms.untracked.string(theFilterName))
598 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
599 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
600 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
601 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
603 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
606 if not hasattr(output,
'SelectEvents'):
607 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
609 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
612 if hasattr(self.
process,theModuleLabel):
613 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
615 setattr(self.
process,theModuleLabel,output)
616 outputModule=getattr(self.
process,theModuleLabel)
617 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
618 path=getattr(self.
process,theModuleLabel+
'_step')
621 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
622 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
623 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
624 if theExtraOutputCommands:
625 if not isinstance(theExtraOutputCommands,list):
626 raise Exception(
"extra ouput command in --option must be a list of strings")
627 if hasattr(self.
process,theStreamType+
"EventContent"):
628 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
630 outputModule.outputCommands.extend(theExtraOutputCommands)
632 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
637 streamTypes=self.
_options.eventcontent.split(
',')
638 tiers=self.
_options.datatier.split(
',')
639 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
640 raise Exception(
"number of event content arguments does not match number of datatier arguments")
646 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
647 if streamType==
'':
continue 648 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 649 if streamType==
'DQMIO': streamType=
'DQM' 650 eventContent=streamType
652 if streamType ==
"NANOEDMAOD" :
653 eventContent =
"NANOAOD" 654 elif streamType ==
"NANOEDMAODSIM" :
655 eventContent =
"NANOAODSIM" 656 theEventContent = getattr(self.
process, eventContent+
"EventContent")
658 theFileName=self.
_options.outfile_name
659 theFilterName=self.
_options.filtername
661 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
662 theFilterName=self.
_options.filtername
663 CppType=
'PoolOutputModule' 665 CppType=
'TimeoutPoolOutputModule' 666 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 667 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 668 output = cms.OutputModule(CppType,
670 fileName = cms.untracked.string(theFileName),
671 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
672 filterName = cms.untracked.string(theFilterName)
675 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
676 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
677 if hasattr(self.
process,
"filtering_step"):
678 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
680 if streamType==
'ALCARECO':
681 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
683 if "MINIAOD" in streamType:
684 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
687 outputModuleName=streamType+
'output' 688 setattr(self.
process,outputModuleName,output)
689 outputModule=getattr(self.
process,outputModuleName)
690 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
691 path=getattr(self.
process,outputModuleName+
'_step')
694 if self.
_options.outputCommands
and streamType!=
'DQM':
695 for evct
in self.
_options.outputCommands.split(
','):
696 if not evct:
continue 697 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
699 if not self.
_options.inlineEventContent:
700 tmpstreamType=streamType
701 if "NANOEDM" in tmpstreamType :
702 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
703 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
705 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
707 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
713 Add selected standard sequences to the process 717 pileupSpec=self.
_options.pileup.split(
',')[0]
720 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
721 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
722 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
726 if '.' in pileupSpec:
727 mixingDict={
'file':pileupSpec}
728 elif pileupSpec.startswith(
'file:'):
729 mixingDict={
'file':pileupSpec[5:]}
732 mixingDict=copy.copy(Mixing[pileupSpec])
733 if len(self.
_options.pileup.split(
','))>1:
734 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
737 if 'file:' in pileupSpec:
740 print(
"inlining mixing module configuration")
745 mixingDict.pop(
'file')
748 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
750 elif self.
_options.pileup_input.startswith(
"filelist:"):
753 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
755 for command
in specialization:
757 if len(mixingDict)!=0:
758 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
779 stepSpec = self.
stepMap[stepName]
780 print(
"Step:", stepName,
"Spec:",stepSpec)
781 if stepName.startswith(
're'):
783 if stepName[2:]
not in self.
_options.donotDropOnInput:
784 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
785 stepName=stepName[2:]
787 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
788 elif isinstance(stepSpec, list):
789 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
790 elif isinstance(stepSpec, tuple):
791 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
793 raise ValueError(
"Invalid step definition")
795 if self.
_options.restoreRNDSeeds!=
False:
797 if self.
_options.restoreRNDSeeds==
True:
798 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
800 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
803 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 805 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 811 def dropSecondDropStar(iec):
821 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
822 for evct
in self.
_options.inputEventContent.split(
','):
823 if evct==
'':
continue 824 theEventContent = getattr(self.
process, evct+
"EventContent")
825 if hasattr(theEventContent,
'outputCommands'):
826 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
827 if hasattr(theEventContent,
'inputCommands'):
828 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
830 dropSecondDropStar(self.
process.source.inputCommands)
832 if not self.
_options.dropDescendant:
833 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
839 """Add conditions to the process""" 840 if not self.
_options.conditions:
return 842 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
843 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
844 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
847 from Configuration.AlCa.GlobalTag
import GlobalTag
854 """Include the customise code """ 858 for c
in self.
_options.customisation_file:
859 custOpt.extend(c.split(
","))
861 for c
in self.
_options.customisation_file_unsch:
862 custOpt.extend(c.split(
","))
868 raise Exception(
"more than . in the specification:"+opt)
869 fileName=opt.split(
'.')[0]
870 if opt.count(
'.')==0: rest=
'customise' 872 rest=opt.split(
'.')[1]
873 if rest==
'py': rest=
'customise' 875 if fileName
in custMap:
876 custMap[fileName].extend(rest.split(
'+'))
878 custMap[fileName]=rest.split(
'+')
883 final_snippet=
'\n# customisation of the process.\n' 887 allFcn.extend(custMap[opt])
889 if allFcn.count(fcn)!=1:
890 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
894 packageName = f.replace(
".py",
"").
replace(
"/",
".")
895 __import__(packageName)
896 package = sys.modules[packageName]
899 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
901 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 903 for line
in file(customiseFile,
'r'): 904 if "import FWCore.ParameterSet.Config" in line:
906 final_snippet += line
908 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
909 for fcn
in custMap[f]:
910 print(
"customising the process with",fcn,
"from",f)
911 if not hasattr(package,fcn):
913 raise Exception(
"config "+f+
" has no function "+fcn)
917 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
918 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
921 final_snippet +=
'\n# End of customisation functions\n' 927 final_snippet=
'\n# Customisation from command line\n' 928 if self.
_options.customise_commands:
930 for com
in self.
_options.customise_commands.split(
'\\n'):
933 final_snippet +=
'\n'+com
944 if self.
_options.particleTable
not in defaultOptions.particleTableList:
945 print(
'Invalid particle table provided. Options are:')
946 print(defaultOptions.particleTable)
954 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 987 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1025 self.
_options.beamspot=VtxSmearedDefaultKey
1030 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1031 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1034 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1043 if self.
_options.scenario==
'cosmics':
1045 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1046 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1047 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1057 if self.
_options.scenario==
'HeavyIons':
1059 self.
_options.beamspot=VtxSmearedHIDefaultKey
1064 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1066 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1069 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1081 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1085 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1089 if 'start' in self.
_options.conditions.lower():
1090 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1092 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1094 def inGeometryKeys(opt):
1095 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1096 if opt
in GeometryConf:
1097 return GeometryConf[opt]
1101 geoms=self.
_options.geometry.split(
',')
1102 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1105 if '/' in geoms[1]
or '_cff' in geoms[1]:
1108 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1110 if (geoms[0].startswith(
'DB:')):
1111 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1115 if '/' in geoms[0]
or '_cff' in geoms[0]:
1118 simGeometry=geoms[0]
1120 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1122 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1125 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1126 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1129 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1134 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1135 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1142 if self.
_options.pileup==
'default':
1143 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1144 self.
_options.pileup=MixingDefaultKey
1157 output = cms.OutputModule(
"PoolOutputModule")
1158 if stream.selectEvents.parameters_().__len__()!=0:
1159 output.SelectEvents = stream.selectEvents
1161 output.SelectEvents = cms.untracked.PSet()
1162 output.SelectEvents.SelectEvents=cms.vstring()
1163 if isinstance(stream.paths,tuple):
1164 for path
in stream.paths:
1165 output.SelectEvents.SelectEvents.append(path.label())
1167 output.SelectEvents.SelectEvents.append(stream.paths.label())
1171 if isinstance(stream.content,str):
1172 evtPset=getattr(self.process,stream.content)
1173 for p
in evtPset.parameters_():
1174 setattr(output,p,getattr(evtPset,p))
1175 if not self._options.inlineEventContent:
1176 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1178 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1180 output.outputCommands = stream.content
1183 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1185 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1186 filterName = cms.untracked.string(stream.name))
1188 if self._options.filtername:
1189 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1192 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1194 if workflow
in (
"producers,full"):
1195 if isinstance(stream.paths,tuple):
1196 for path
in stream.paths:
1197 self.schedule.
append(path)
1199 self.schedule.
append(stream.paths)
1203 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1204 self.additionalOutputs[name] = output
1205 setattr(self.process,name,output)
1207 if workflow ==
'output':
1209 filterList = output.SelectEvents.SelectEvents
1210 for i, filter
in enumerate(filterList):
1211 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1221 if ( len(sequence.split(
'.'))==1 ):
1223 elif ( len(sequence.split(
'.'))==2 ):
1225 sequence=sequence.split(
'.')[1]
1227 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1228 print(sequence,
"not recognized")
1235 for i,s
in enumerate(seq.split(
'*')):
1237 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1239 p=getattr(self.
process,prefix)
1240 tmp = getattr(self.
process, s)
1241 if isinstance(tmp, cms.Task):
1252 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1255 for i,s
in enumerate(seq.split(
'+')):
1257 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1271 def prepare_ALCA(self, sequence = None, workflow = 'full'):
1272 """ Enrich the process with alca streams """ 1274 sequence = sequence.split(
'.')[-1]
1278 alcaList = sequence.split(
"+")
1279 for alca
in alcaList:
1280 if (len(alca)>MAXLEN):
1281 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1284 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1288 for name
in alcaConfig.__dict__:
1289 alcastream = getattr(alcaConfig,name)
1290 shortName = name.replace(
'ALCARECOStream',
'')
1291 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1292 if shortName
in AlCaNoConcurrentLumis:
1293 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1294 self.
_options.nConcurrentLumis =
"1" 1295 self.
_options.nConcurrentIOVs =
"1" 1296 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1297 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1299 if 'DQM' in alcaList:
1300 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1301 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1303 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1307 if isinstance(alcastream.paths,tuple):
1308 for path
in alcastream.paths:
1313 for i
in range(alcaList.count(shortName)):
1314 alcaList.remove(shortName)
1317 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1318 path = getattr(alcaConfig,name)
1320 alcaList.remove(
'DQM')
1322 if isinstance(alcastream,cms.Path):
1327 if len(alcaList) != 0:
1329 for name
in alcaConfig.__dict__:
1330 alcastream = getattr(alcaConfig,name)
1331 if isinstance(alcastream,cms.FilteredStream):
1332 available.append(name.replace(
'ALCARECOStream',
''))
1333 print(
"The following alcas could not be found "+
str(alcaList))
1334 print(
"available ",available)
1336 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1341 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1342 print(
"Loading lhe fragment from",loadFragment)
1343 __import__(loadFragment)
1344 self.process.
load(loadFragment)
1346 self._options.inlineObjets+=
','+sequence
1348 getattr(self.process,sequence).nEvents =
int(self._options.number)
1351 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1352 self.excludedPaths.
append(
"lhe_step")
1353 self.schedule.
append( self.process.lhe_step )
1356 """ load the fragment of generator configuration """ 1363 if not '/' in loadFragment:
1364 loadFragment=
'Configuration.Generator.'+loadFragment
1366 loadFragment=loadFragment.replace(
'/',
'.')
1368 print(
"Loading generator fragment from",loadFragment)
1369 __import__(loadFragment)
1374 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1377 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1379 generatorModule=sys.modules[loadFragment]
1380 genModules=generatorModule.__dict__
1391 import FWCore.ParameterSet.Modules
as cmstypes
1392 for name
in genModules:
1393 theObject = getattr(generatorModule,name)
1394 if isinstance(theObject, cmstypes._Module):
1396 if theObject.type_()
in noConcurrentLumiGenerators:
1397 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1398 self.
_options.nConcurrentLumis =
"1" 1399 self.
_options.nConcurrentIOVs =
"1" 1400 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1401 self.
_options.inlineObjets+=
','+name
1403 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1404 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1406 elif 'generator' in genModules:
1409 """ Enrich the schedule with the rest of the generation step """ 1411 genSeqName=sequence.split(
'.')[-1]
1415 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1416 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1419 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1421 if self.
_options.scenario ==
'HeavyIons':
1422 if self.
_options.pileup==
'HiMixGEN':
1423 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1424 elif self.
_options.pileup==
'HiMixEmbGEN':
1425 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1427 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1429 self.
process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1433 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1439 """ Enrich the schedule with the summary of the filter step """ 1446 """ Enrich the schedule with the simulation step""" 1456 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1462 """ Enrich the schedule with the digitisation step""" 1466 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1468 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1469 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1471 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1472 if self.
_options.inputEventContent==
'':
1473 self.
_options.inputEventContent=
'REGEN' 1482 """ Enrich the schedule with the crossing frame writer step""" 1488 """ Enrich the schedule with the digitisation step""" 1494 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1496 elif self.
_options.pileup_input.startswith(
"filelist:"):
1499 theFiles=self.
_options.pileup_input.split(
',')
1501 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1516 """ Enrich the schedule with the L1 simulation step""" 1523 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1524 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1525 if sequence
in supported:
1526 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1527 if self.
_options.scenario ==
'HeavyIons':
1531 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1535 """ Enrich the schedule with the HLT simulation step""" 1537 print(
"no specification of the hlt menu has been given, should never happen")
1538 raise Exception(
'no HLT sequence provided')
1542 from Configuration.HLT.autoHLT
import autoHLT
1545 sequence = autoHLT[key]
1547 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1553 if self.
_options.scenario ==
'HeavyIons':
1554 optionsForHLT[
'type'] =
'HIon' 1556 optionsForHLT[
'type'] =
'GRun' 1557 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1558 if sequence ==
'run,fromSource':
1559 if hasattr(self.
process.source,
'firstRun'):
1560 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1561 elif hasattr(self.
process.source,
'setRunNumber'):
1562 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1564 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1566 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1571 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1577 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1580 if self.
process.schedule ==
None:
1581 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1588 if not hasattr(self.
process,
'HLTEndSequence'):
1589 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1594 seqReco=sequence.split(
',')[1]
1595 seqDigi=sequence.split(
',')[0]
1597 print(
"RAW2RECO requires two specifications",sequence,
"insufficient")
1613 for filt
in allMetFilterPaths:
1617 ''' Enrich the schedule with L1 HW validation ''' 1620 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1624 ''' Enrich the schedule with L1 reconstruction ''' 1630 ''' Enrich the schedule with L1 reconstruction ''' 1636 ''' Enrich the schedule with a user defined filter sequence ''' 1638 filterConfig=self.
load(sequence.split(
'.')[0])
1639 filterSeq=sequence.split(
'.')[-1]
1641 class PrintAllModules(
object):
1645 def enter(self,visitee):
1647 label=visitee.label()
1652 def leave(self,v):
pass 1654 expander=PrintAllModules()
1655 getattr(self.
process,filterSeq).visit( expander )
1656 self.
_options.inlineObjets+=
','+expander.inliner
1657 self.
_options.inlineObjets+=
','+filterSeq
1668 ''' Enrich the schedule with reconstruction ''' 1674 ''' Enrich the schedule with reconstruction ''' 1680 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1682 print(
"ERROR: this step is only implemented for FastSim")
1685 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1689 ''' Enrich the schedule with PAT ''' 1694 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1697 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1699 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1702 if len(self.
_options.customise_commands) > 1:
1703 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1704 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1705 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1706 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1713 ''' Enrich the schedule with PATGEN ''' 1717 raise Exception(
"PATGEN step can only run on MC")
1721 ''' Enrich the schedule with NANO ''' 1724 custom =
"nanoAOD_customizeData" if self.
_options.isData
else "nanoAOD_customizeMC" 1725 self.
_options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1727 if len(self.
_options.customise_commands) > 1:
1728 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1729 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1732 ''' Enrich the schedule with NANOGEN ''' 1734 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1737 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1744 ''' Enrich the schedule with skimming fragments''' 1746 sequence = sequence.split(
'.')[-1]
1748 skimlist=sequence.split(
'+')
1750 from Configuration.Skimming.autoSkim
import autoSkim
1754 for skim
in skimConfig.__dict__:
1755 skimstream = getattr(skimConfig,skim)
1756 if isinstance(skimstream,cms.Path):
1759 if (
not isinstance(skimstream,cms.FilteredStream)):
1761 shortname = skim.replace(
'SKIMStream',
'')
1762 if (sequence==
"all"):
1764 elif (shortname
in skimlist):
1769 skimstreamDQM = cms.FilteredStream(
1770 responsible = skimstream.responsible,
1771 name = skimstream.name+
'DQM',
1772 paths = skimstream.paths,
1773 selectEvents = skimstream.selectEvents,
1774 content = self.
_options.datatier+
'EventContent',
1775 dataTier = cms.untracked.string(self.
_options.datatier)
1778 for i
in range(skimlist.count(shortname)):
1779 skimlist.remove(shortname)
1783 if (skimlist.__len__()!=0
and sequence!=
"all"):
1784 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1785 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1788 ''' Enrich the schedule with a user defined sequence ''' 1794 """ Enrich the schedule with the postreco step """ 1801 print(sequence,
"in preparing validation")
1803 from Validation.Configuration.autoValidation
import autoValidation
1805 sequence=sequence.split(
'.')[-1]
1806 if sequence.find(
',')!=-1:
1807 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1808 valSeqName=sequence.split(
',')[1].
split(
'+')
1813 prevalSeqName=sequence.split(
'+')
1814 valSeqName=sequence.split(
'+')
1820 postfix=
'_'+sequence
1821 prevalSeqName=[
'prevalidation'+postfix]
1822 valSeqName=[
'validation'+postfix]
1823 if not hasattr(self.
process,valSeqName[0]):
1825 valSeqName=[sequence]
1837 for s
in valSeqName+prevalSeqName:
1840 for (i,s)
in enumerate(prevalSeqName):
1842 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1845 for (i,s)
in enumerate(valSeqName):
1846 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1853 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1854 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1861 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1863 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1865 for (i,s)
in enumerate(valSeqName):
1872 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1873 It will climb down within PSets, VPSets and VInputTags to find its target""" 1874 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1880 def doIt(self,pset,base):
1881 if isinstance(pset, cms._Parameterizable):
1882 for name
in pset.parameters_().
keys():
1888 value = getattr(pset,name)
1889 type = value.pythonTypeName()
1890 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1891 self.
doIt(value,base+
"."+name)
1892 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1893 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1894 elif type
in (
'cms.string',
'cms.untracked.string'):
1898 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1899 for (i,n)
in enumerate(value):
1900 if not isinstance(n, cms.InputTag):
1907 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1908 for (i,n)
in enumerate(value):
1911 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1914 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1916 def enter(self,visitee):
1919 label = visitee.label()
1920 except AttributeError:
1921 label =
'<Module not in a Process>' 1923 label =
'other execption' 1924 self.
doIt(visitee, label)
1926 def leave(self,visitee):
1931 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1934 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1937 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1945 if proc==HLTprocess:
return 1947 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1949 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1951 self.
additionalCommands.
append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1957 while '@' in repr(seqList)
and level<maxLevel:
1959 for specifiedCommand
in seqList:
1960 if specifiedCommand.startswith(
'@'):
1961 location=specifiedCommand[1:]
1962 if not location
in mapping:
1963 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1964 mappedTo=mapping[location]
1966 mappedTo=mappedTo[index]
1967 seqList.remove(specifiedCommand)
1968 seqList.extend(mappedTo.split(
'+'))
1971 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1979 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1980 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1981 from DQMOffline.Configuration.autoDQM
import autoDQM
1985 if len(set(sequenceList))!=len(sequenceList):
1986 sequenceList=list(set(sequenceList))
1987 print(
"Duplicate entries for DQM:, using",sequenceList)
1989 pathName=
'dqmoffline_step' 1990 for (i,sequence)
in enumerate(sequenceList):
1992 pathName=
'dqmoffline_%d_step'%(i)
1997 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,sequence ) ) )
2000 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2005 pathName=
'dqmofflineOnPAT_step' 2006 for (i,sequence)
in enumerate(postSequenceList):
2008 if (sequenceList[i]==postSequenceList[i]):
2011 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2013 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, sequence ) ) )
2017 """ Enrich the process with harvesting step """ 2018 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2022 sequence = sequence.split(
'.')[-1]
2025 harvestingList = sequence.split(
"+")
2026 from DQMOffline.Configuration.autoDQM
import autoDQM
2027 from Validation.Configuration.autoValidation
import autoValidation
2029 combined_mapping = copy.deepcopy( autoDQM )
2030 combined_mapping.update( autoValidation )
2031 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2033 if len(set(harvestingList))!=len(harvestingList):
2034 harvestingList=list(set(harvestingList))
2035 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2037 for name
in harvestingList:
2038 if not name
in harvestingConfig.__dict__:
2039 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2043 harvestingstream = getattr(harvestingConfig,name)
2044 if isinstance(harvestingstream,cms.Path):
2047 if isinstance(harvestingstream,cms.Sequence):
2048 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2055 """ Enrich the process with AlCaHarvesting step """ 2057 sequence=sequence.split(
".")[-1]
2060 harvestingList = sequence.split(
"+")
2064 from Configuration.AlCa.autoPCL
import autoPCL
2067 for name
in harvestingConfig.__dict__:
2068 harvestingstream = getattr(harvestingConfig,name)
2069 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2071 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2072 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2073 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2074 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2076 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2077 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2078 harvestingList.remove(name)
2080 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2083 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2084 print(
"The following harvesting could not be found : ", harvestingList)
2085 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2100 """ Add useful info for the production. """ 2101 self.
process.configurationMetadata=cms.untracked.PSet\
2102 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2103 name=cms.untracked.string(
"Applications"),
2104 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2112 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2114 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2123 from Configuration.StandardSequences.Eras
import eras
2124 for requestedEra
in self.
_options.era.split(
",") :
2125 modifierStrings.append(requestedEra)
2126 modifierImports.append(eras.pythonCfgLines[requestedEra])
2127 modifiers.append(getattr(eras,requestedEra))
2133 for c
in self.
_options.procModifiers:
2134 thingsImported.extend(c.split(
","))
2135 for pm
in thingsImported:
2136 modifierStrings.append(pm)
2137 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2138 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2144 if len(modifierStrings)>0:
2151 if len(modifiers)>0:
2159 def prepare(self, doChecking = False):
2160 """ Prepare the configuration string and add missing pieces.""" 2172 outputModuleCfgCode=
"" 2178 self.
pythonCfgCode +=
"# import of standard configurations\n" 2183 if not hasattr(self.
process,
"configurationMetadata"):
2205 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2206 tmpOut = cms.EndPath(output)
2207 setattr(self.
process,name+
'OutPath',tmpOut)
2216 for object
in self.
_options.inlineObjets.split(
','):
2219 if not hasattr(self.
process,object):
2220 print(
'cannot inline -'+object+
'- : not known')
2225 if self.
_options.pileup==
'HiMixEmbGEN':
2226 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2230 for path
in self.
process.paths:
2234 for endpath
in self.
process.endpaths:
2242 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2243 if self.
process.schedule ==
None:
2244 self.
process.schedule = cms.Schedule()
2246 self.
process.schedule.append(item)
2247 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2250 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2252 for index, item
in enumerate(self.
schedule):
2254 self.
process.schedule.insert(index, item)
2256 self.
process.schedule.append(item)
2258 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2260 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2267 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2268 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2272 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2275 overrideThreads = (self.
_options.nThreads !=
"1")
2276 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2277 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2279 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2287 if overrideConcurrentLumis:
2288 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks = "+self.
_options.nConcurrentLumis+
"\n" 2289 self.
process.options.numberOfConcurrentLuminosityBlocks =
int(self.
_options.nConcurrentLumis)
2290 if overrideConcurrentIOVs:
2291 self.
pythonCfgCode +=
"process.options.eventSetup.numberOfConcurrentIOVs = "+self.
_options.nConcurrentIOVs+
"\n" 2292 self.
process.options.eventSetup.numberOfConcurrentIOVs =
int(self.
_options.nConcurrentIOVs)
2294 if self.
_options.accelerators
is not None:
2295 accelerators = self.
_options.accelerators.split(
',')
2297 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2298 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2299 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2300 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2301 self.
process.options.accelerators = accelerators
2306 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2307 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2308 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2312 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2320 for path
in self.
process.paths:
2330 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2339 if hasattr(self.
process,
"logErrorHarvester"):
2341 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2342 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2343 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2344 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2351 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2352 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2353 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2355 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2358 imports = cms.specialImportRegistry.getSpecialImports()
2359 if len(imports) > 0:
2371 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2374 if hasattr(self.
process.source,
"fileNames"):
2375 if len(self.
process.source.fileNames.value()):
2376 ioJson[
'primary']=self.
process.source.fileNames.value()
2377 if hasattr(self.
process.source,
"secondaryFileNames"):
2378 if len(self.
process.source.secondaryFileNames.value()):
2379 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2380 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2381 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2383 ioJson[o]=om.fileName.value()
2384 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2388 io.write(json.dumps(ioJson))
2391 def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def split(sequence, size)
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_NANOGEN(self, sequence="nanoAOD")
def prepare_L1(self, sequence=None)
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule