3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
11 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
17 from subprocess
import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes
as DictTypes
# ---------------------------------------------------------------------------
# Default values for the cmsDriver / ConfigBuilder options.
# `defaultOptions` itself is created earlier in the file (outside this chunk)
# -- presumably an options-holder object; TODO confirm against full source.
# Each attribute is the fallback used when the command line does not override it.
# ---------------------------------------------------------------------------

# Data/MC mixing and sample-type flags.
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True

# Processing steps (comma-separated list, e.g. "GEN,SIM"); empty = none given.
defaultOptions.step=''

# Pileup scenario and its input dataset / DAS query option.
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# Geometry selection; the "Extended*" variants listed here are treated
# specially later when choosing the simulation geometry cff.
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']

# Magnetic field map and conditions (GlobalTag); None/'' = must be supplied.
defaultOptions.magField = ''
defaultOptions.conditions = None

# Allowed values for the --scenario option (validated elsewhere).
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']

# DQM harvesting mode and GFlash fast-parametrised shower switch.
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False

# Number of events to process (-1 = all) and to output.
defaultOptions.number = -1
defaultOptions.number_out = None

# Bookkeeping: original command-line arguments and configuration name.
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"

# Event type (generator fragment) and input specification
# (file list, DAS query, secondary files).
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""

# Customisation hooks: python files applied (un)scheduled, plus raw
# command strings appended at the end of the configuration.
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False

# Particle data table; the list holds the accepted values
# (validated against particleTableList later in the file).
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']

# I/O locations and formats.
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''

# AdaptorConfig lazy-download switch and free-form conditions override.
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''

# HLT process name to use when renaming trigger products ('' = default).
defaultOptions.hltProcess = ''

# Output definition: event content / data tier pairs, and whether the
# event-content vstring is inlined into the dumped python.
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False

# Imported here because the beamspot default keys are what --beamspot
# falls back to when unset (used further down for pp and HeavyIons).
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None

# Advanced output/input steering: --output dict list, explicit
# keep/drop commands, and input event-content filtering.
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False

# Misc processing flags: relval splitting, igprof profiling spec,
# repacked-data input, random-seed restoration, branch keep-list.
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''

# Output python file name, io dump, and luminosity-block selection (JSON).
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None

# FastSim switch and run/weight assignment for MC (float and integer variants).
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None

# Scheduling/output-module behaviour and framework concurrency settings.
# NOTE(review): the concurrency values are strings, not ints -- they appear
# to be converted downstream; confirm before changing their type.
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
97 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
98 return "process."+name+
" = " + theObject.dumpPython()
99 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
100 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 102 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 105 import FWCore.ParameterSet.Config
as cms
108 for line
in open(fileName,
'r'): 109 if line.count(
".root")>=2:
111 entries=line.replace(
"\n",
"").
split()
112 prim.append(entries[0])
113 sec.append(entries[1])
114 elif (line.find(
".root")!=-1):
115 entry=line.replace(
"\n",
"")
119 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
121 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
123 if not hasattr(s,
"fileNames"):
124 s.fileNames=cms.untracked.vstring(prim)
126 s.fileNames.extend(prim)
128 if not hasattr(s,
"secondaryFileNames"):
129 s.secondaryFileNames=cms.untracked.vstring(sec)
131 s.secondaryFileNames.extend(sec)
132 print(
"found files: ",prim)
134 raise Exception(
"There are not files in input from the file list")
136 print(
"found parent files:",sec)
141 import FWCore.ParameterSet.Config
as cms
144 print(
"the query is",query)
147 while eC!=0
and count<3:
149 print(
'Sleeping, then retrying DAS')
151 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
153 tupleP = os.waitpid(p.pid, 0)
157 print(
"DAS succeeded after",count,
"attempts",eC)
159 print(
"DAS failed 3 times- I give up")
160 for line
in pipe.split(
'\n'):
161 if line.count(
".root")>=2:
163 entries=line.replace(
"\n",
"").
split()
164 prim.append(entries[0])
165 sec.append(entries[1])
166 elif (line.find(
".root")!=-1):
167 entry=line.replace(
"\n",
"")
170 prim = sorted(list(set(prim)))
171 sec = sorted(list(set(sec)))
173 if not hasattr(s,
"fileNames"):
174 s.fileNames=cms.untracked.vstring(prim)
176 s.fileNames.extend(prim)
178 if not hasattr(s,
"secondaryFileNames"):
179 s.secondaryFileNames=cms.untracked.vstring(sec)
181 s.secondaryFileNames.extend(sec)
182 print(
"found files: ",prim)
184 print(
"found parent files:",sec)
187 def anyOf(listOfKeys,dict,opt=None):
196 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
199 """The main building routines """ 201 def __init__(self, options, process = None, with_output = False, with_input = False ):
202 """options taken from old cmsDriver and optparse """ 204 options.outfile_name = options.dirout+options.fileout
208 if self.
_options.isData
and options.isMC:
209 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
215 if (hasattr(self.
_options,
"outputDefinition")
and \
216 self.
_options.outputDefinition !=
'' and \
217 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
218 (hasattr(self.
_options,
"datatier")
and \
221 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
227 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
230 for step
in self.
_options.step.split(
","):
231 if step==
'':
continue 232 stepParts = step.split(
":")
233 stepName = stepParts[0]
234 if stepName
not in stepList
and not stepName.startswith(
're'):
235 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
236 if len(stepParts)==1:
238 elif len(stepParts)==2:
240 elif len(stepParts)==3:
241 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
243 raise ValueError(f
"Step definition {step} invalid")
251 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
279 Function to add the igprof profile service so that you can dump in the middle 282 profileOpts = self.
_options.profile.split(
':')
284 profilerInterval = 100
285 profilerFormat =
None 286 profilerJobFormat =
None 292 startEvent = profileOpts.pop(0)
293 if not startEvent.isdigit():
294 raise Exception(
"%s is not a number" % startEvent)
295 profilerStart =
int(startEvent)
297 eventInterval = profileOpts.pop(0)
298 if not eventInterval.isdigit():
299 raise Exception(
"%s is not a number" % eventInterval)
300 profilerInterval =
int(eventInterval)
302 profilerFormat = profileOpts.pop(0)
305 if not profilerFormat:
306 profilerFormat =
"%s___%s___%%I.gz" % (
307 self.
_options.evt_type.replace(
"_cfi",
""),
313 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
314 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
315 elif not profilerJobFormat:
316 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 318 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
321 includeFile = includeFile.replace(
'/',
'.')
323 return sys.modules[includeFile]
326 """helper routine to load am memorize imports""" 329 includeFile = includeFile.replace(
'/',
'.')
332 return sys.modules[includeFile]
335 """helper routine to remember replace statements""" 337 if not command.strip().startswith(
"#"):
340 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
345 self.
process.options.Rethrow = [
'ProductNotFound']
346 self.
process.options.fileMode =
'FULLMERGE' 351 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
352 stats = cms.untracked.bool(
True),
353 enable = cms.untracked.bool(
True),
354 cacheHint = cms.untracked.string(
"lazy-download"),
355 readHint = cms.untracked.string(
"read-ahead-buffered")
364 self.
process.IgProfService = cms.Service(
"IgProfService",
365 reportFirstEvent = cms.untracked.int32(start),
366 reportEventInterval = cms.untracked.int32(interval),
367 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
368 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
372 """Here we decide how many evts will be processed""" 379 """Here the source is built. Priority: file, generator""" 382 def filesFromOption(self):
383 for entry
in self.
_options.filein.split(
','):
385 if entry.startswith(
"filelist:"):
387 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
392 if not hasattr(self.
process.source,
"secondaryFileNames"):
393 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
394 for entry
in self.
_options.secondfilein.split(
','):
396 if entry.startswith(
"filelist:"):
398 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
401 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
405 self.
process.source=cms.Source(
"PoolSource",
406 fileNames = cms.untracked.vstring(),
407 secondaryFileNames= cms.untracked.vstring())
408 filesFromOption(self)
409 elif self.
_options.filetype ==
"DAT":
410 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
411 filesFromOption(self)
412 elif self.
_options.filetype ==
"LHE":
413 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
414 if self.
_options.filein.startswith(
"lhe:"):
416 args=self.
_options.filein.split(
':')
418 print(
'LHE input from article ',article)
419 location=
'/store/lhe/' 421 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
422 for line
in textOfFiles:
423 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
424 self.
process.source.fileNames.append(location+article+
'/'+fileName)
427 print(
'Issue to load LHE files, please check and try again.')
430 if len(self.
process.source.fileNames)==0:
431 print(
'Issue with empty filename, but can pass line check')
434 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
436 filesFromOption(self)
438 elif self.
_options.filetype ==
"DQM":
439 self.
process.source=cms.Source(
"DQMRootSource",
440 fileNames = cms.untracked.vstring())
441 filesFromOption(self)
443 elif self.
_options.filetype ==
"DQMDAQ":
445 self.
process.source=cms.Source(
"DQMStreamerReader")
449 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
452 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
456 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
461 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 463 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 466 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
467 for command
in self.
_options.inputCommands.split(
','):
469 command = command.strip()
470 if command==
'':
continue 471 self.
process.source.inputCommands.append(command)
472 if not self.
_options.dropDescendant:
473 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
476 import FWCore.PythonUtilities.LumiList
as LumiList
480 if self.
process.source
is None:
481 self.
process.source=cms.Source(
"EmptySource")
487 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
488 if self.
_options.runsAndWeightsForMC:
491 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
492 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
493 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
494 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
499 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
506 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
508 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
509 if self.
_options.runsAndWeightsForMCIntegerWeights:
512 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
513 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
514 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
515 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
521 raise Exception(
"--relval option required when using --runsAndWeightsInt")
523 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
524 total_events, events_per_job = self.
_options.relval.split(
',')
526 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
531 """ Add output module to the process """ 535 print(
"--datatier & --eventcontent options ignored")
538 outList = eval(self.
_options.outputDefinition)
539 for (id,outDefDict)
in enumerate(outList):
540 outDefDictStr=outDefDict.__str__()
541 if not isinstance(outDefDict,dict):
542 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
544 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
547 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
548 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
549 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
550 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
551 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
553 if not theModuleLabel:
554 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
555 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
556 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 558 for name
in tryNames:
559 if not hasattr(self.
process,name):
562 if not theModuleLabel:
563 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
565 defaultFileName=self.
_options.outfile_name
567 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
569 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
570 if not theFileName.endswith(
'.root'):
574 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
575 if theStreamType==
'DQMIO': theStreamType=
'DQM' 576 if theStreamType==
'ALL':
577 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
579 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
583 if theStreamType==
'ALCARECO' and not theFilterName:
584 theFilterName=
'StreamALCACombined' 587 CppType=
'PoolOutputModule' 589 CppType=
'TimeoutPoolOutputModule' 590 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 591 output = cms.OutputModule(CppType,
592 theEventContent.clone(),
593 fileName = cms.untracked.string(theFileName),
594 dataset = cms.untracked.PSet(
595 dataTier = cms.untracked.string(theTier),
596 filterName = cms.untracked.string(theFilterName))
598 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
599 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
600 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
601 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
603 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
606 if not hasattr(output,
'SelectEvents'):
607 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
609 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
612 if hasattr(self.
process,theModuleLabel):
613 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
615 setattr(self.
process,theModuleLabel,output)
616 outputModule=getattr(self.
process,theModuleLabel)
617 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
618 path=getattr(self.
process,theModuleLabel+
'_step')
621 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
622 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
623 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
624 if theExtraOutputCommands:
625 if not isinstance(theExtraOutputCommands,list):
626 raise Exception(
"extra ouput command in --option must be a list of strings")
627 if hasattr(self.
process,theStreamType+
"EventContent"):
628 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
630 outputModule.outputCommands.extend(theExtraOutputCommands)
632 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
637 streamTypes=self.
_options.eventcontent.split(
',')
638 tiers=self.
_options.datatier.split(
',')
639 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
640 raise Exception(
"number of event content arguments does not match number of datatier arguments")
646 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
647 if streamType==
'':
continue 648 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 649 if streamType==
'DQMIO': streamType=
'DQM' 650 eventContent=streamType
652 if streamType ==
"NANOEDMAOD" :
653 eventContent =
"NANOAOD" 654 elif streamType ==
"NANOEDMAODSIM" :
655 eventContent =
"NANOAODSIM" 656 theEventContent = getattr(self.
process, eventContent+
"EventContent")
658 theFileName=self.
_options.outfile_name
659 theFilterName=self.
_options.filtername
661 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
662 theFilterName=self.
_options.filtername
663 CppType=
'PoolOutputModule' 665 CppType=
'TimeoutPoolOutputModule' 666 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 667 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 668 output = cms.OutputModule(CppType,
670 fileName = cms.untracked.string(theFileName),
671 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
672 filterName = cms.untracked.string(theFilterName)
675 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
676 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
677 if hasattr(self.
process,
"filtering_step"):
678 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
680 if streamType==
'ALCARECO':
681 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
683 if "MINIAOD" in streamType:
684 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
687 outputModuleName=streamType+
'output' 688 setattr(self.
process,outputModuleName,output)
689 outputModule=getattr(self.
process,outputModuleName)
690 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
691 path=getattr(self.
process,outputModuleName+
'_step')
694 if self.
_options.outputCommands
and streamType!=
'DQM':
695 for evct
in self.
_options.outputCommands.split(
','):
696 if not evct:
continue 697 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
699 if not self.
_options.inlineEventContent:
700 tmpstreamType=streamType
701 if "NANOEDM" in tmpstreamType :
702 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
703 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
705 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
707 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
713 Add selected standard sequences to the process 717 pileupSpec=self.
_options.pileup.split(
',')[0]
720 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
721 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
722 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
726 if '.' in pileupSpec:
727 mixingDict={
'file':pileupSpec}
728 elif pileupSpec.startswith(
'file:'):
729 mixingDict={
'file':pileupSpec[5:]}
732 mixingDict=copy.copy(Mixing[pileupSpec])
733 if len(self.
_options.pileup.split(
','))>1:
734 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
737 if 'file:' in pileupSpec:
740 print(
"inlining mixing module configuration")
745 mixingDict.pop(
'file')
748 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
750 elif self.
_options.pileup_input.startswith(
"filelist:"):
753 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
755 for command
in specialization:
757 if len(mixingDict)!=0:
758 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
779 stepSpec = self.
stepMap[stepName]
780 print(
"Step:", stepName,
"Spec:",stepSpec)
781 if stepName.startswith(
're'):
783 if stepName[2:]
not in self.
_options.donotDropOnInput:
784 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
785 stepName=stepName[2:]
787 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
788 elif isinstance(stepSpec, list):
789 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
790 elif isinstance(stepSpec, tuple):
791 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
793 raise ValueError(
"Invalid step definition")
795 if self.
_options.restoreRNDSeeds!=
False:
797 if self.
_options.restoreRNDSeeds==
True:
798 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
800 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
803 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 805 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 811 def dropSecondDropStar(iec):
821 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
822 for evct
in self.
_options.inputEventContent.split(
','):
823 if evct==
'':
continue 824 theEventContent = getattr(self.
process, evct+
"EventContent")
825 if hasattr(theEventContent,
'outputCommands'):
826 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
827 if hasattr(theEventContent,
'inputCommands'):
828 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
830 dropSecondDropStar(self.
process.source.inputCommands)
832 if not self.
_options.dropDescendant:
833 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
839 """Add conditions to the process""" 840 if not self.
_options.conditions:
return 842 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
843 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
844 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
847 from Configuration.AlCa.GlobalTag
import GlobalTag
854 """Include the customise code """ 858 for c
in self.
_options.customisation_file:
859 custOpt.extend(c.split(
","))
861 for c
in self.
_options.customisation_file_unsch:
862 custOpt.extend(c.split(
","))
868 raise Exception(
"more than . in the specification:"+opt)
869 fileName=opt.split(
'.')[0]
870 if opt.count(
'.')==0: rest=
'customise' 872 rest=opt.split(
'.')[1]
873 if rest==
'py': rest=
'customise' 875 if fileName
in custMap:
876 custMap[fileName].extend(rest.split(
'+'))
878 custMap[fileName]=rest.split(
'+')
883 final_snippet=
'\n# customisation of the process.\n' 887 allFcn.extend(custMap[opt])
889 if allFcn.count(fcn)!=1:
890 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
894 packageName = f.replace(
".py",
"").
replace(
"/",
".")
895 __import__(packageName)
896 package = sys.modules[packageName]
899 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
901 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 903 for line
in file(customiseFile,
'r'): 904 if "import FWCore.ParameterSet.Config" in line:
906 final_snippet += line
908 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
909 for fcn
in custMap[f]:
910 print(
"customising the process with",fcn,
"from",f)
911 if not hasattr(package,fcn):
913 raise Exception(
"config "+f+
" has no function "+fcn)
917 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
918 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
921 final_snippet +=
'\n# End of customisation functions\n' 927 final_snippet=
'\n# Customisation from command line\n' 928 if self.
_options.customise_commands:
930 for com
in self.
_options.customise_commands.split(
'\\n'):
933 final_snippet +=
'\n'+com
944 if self.
_options.particleTable
not in defaultOptions.particleTableList:
945 print(
'Invalid particle table provided. Options are:')
946 print(defaultOptions.particleTable)
954 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 987 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1025 self.
_options.beamspot=VtxSmearedDefaultKey
1030 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1031 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1034 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1043 if self.
_options.scenario==
'cosmics':
1045 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1046 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1047 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1057 if self.
_options.scenario==
'HeavyIons':
1059 self.
_options.beamspot=VtxSmearedHIDefaultKey
1064 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1066 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1069 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1081 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1085 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1089 if 'start' in self.
_options.conditions.lower():
1090 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1092 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1094 def inGeometryKeys(opt):
1095 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1096 if opt
in GeometryConf:
1097 return GeometryConf[opt]
1101 geoms=self.
_options.geometry.split(
',')
1102 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1105 if '/' in geoms[1]
or '_cff' in geoms[1]:
1108 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1110 if (geoms[0].startswith(
'DB:')):
1111 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1115 if '/' in geoms[0]
or '_cff' in geoms[0]:
1118 simGeometry=geoms[0]
1120 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1122 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1125 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1126 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1129 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1134 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1135 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1142 if self.
_options.pileup==
'default':
1143 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1144 self.
_options.pileup=MixingDefaultKey
1157 output = cms.OutputModule(
"PoolOutputModule")
1158 if stream.selectEvents.parameters_().__len__()!=0:
1159 output.SelectEvents = stream.selectEvents
1161 output.SelectEvents = cms.untracked.PSet()
1162 output.SelectEvents.SelectEvents=cms.vstring()
1163 if isinstance(stream.paths,tuple):
1164 for path
in stream.paths:
1165 output.SelectEvents.SelectEvents.append(path.label())
1167 output.SelectEvents.SelectEvents.append(stream.paths.label())
1171 if isinstance(stream.content,str):
1172 evtPset=getattr(self.process,stream.content)
1173 for p
in evtPset.parameters_():
1174 setattr(output,p,getattr(evtPset,p))
1175 if not self._options.inlineEventContent:
1176 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1178 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1180 output.outputCommands = stream.content
1183 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1185 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1186 filterName = cms.untracked.string(stream.name))
1188 if self._options.filtername:
1189 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1192 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1194 if workflow
in (
"producers,full"):
1195 if isinstance(stream.paths,tuple):
1196 for path
in stream.paths:
1197 self.schedule.
append(path)
1199 self.schedule.
append(stream.paths)
1203 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1204 self.additionalOutputs[name] = output
1205 setattr(self.process,name,output)
1207 if workflow ==
'output':
1209 filterList = output.SelectEvents.SelectEvents
1210 for i, filter
in enumerate(filterList):
1211 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1221 _dotsplit = stepSpec.split(
'.')
1222 if ( len(_dotsplit)==1 ):
1223 if '/' in _dotsplit[0]:
1224 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1227 _sequence = stepSpec
1229 elif ( len(_dotsplit)==2 ):
1230 _cff,_sequence = _dotsplit
1232 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1233 print(stepSpec,
"not recognized")
1236 return l,_sequence,_cff
1241 for i,s
in enumerate(seq.split(
'*')):
1243 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1245 p=getattr(self.
process,prefix)
1246 tmp = getattr(self.
process, s)
1247 if isinstance(tmp, cms.Task):
1258 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1261 for i,s
in enumerate(seq.split(
'+')):
1263 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1277 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1278 """ Enrich the process with alca streams """ 1283 alcaList = sequence.split(
"+")
1284 for alca
in alcaList:
1285 if (len(alca)>MAXLEN):
1286 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1289 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1293 for name
in alcaConfig.__dict__:
1294 alcastream = getattr(alcaConfig,name)
1295 shortName = name.replace(
'ALCARECOStream',
'')
1296 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1297 if shortName
in AlCaNoConcurrentLumis:
1298 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1299 self.
_options.nConcurrentLumis =
"1" 1300 self.
_options.nConcurrentIOVs =
"1" 1301 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1302 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1304 if 'DQM' in alcaList:
1305 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1306 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1308 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1312 if isinstance(alcastream.paths,tuple):
1313 for path
in alcastream.paths:
1318 for i
in range(alcaList.count(shortName)):
1319 alcaList.remove(shortName)
1322 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1323 path = getattr(alcaConfig,name)
1325 alcaList.remove(
'DQM')
1327 if isinstance(alcastream,cms.Path):
1332 if len(alcaList) != 0:
1334 for name
in alcaConfig.__dict__:
1335 alcastream = getattr(alcaConfig,name)
1336 if isinstance(alcastream,cms.FilteredStream):
1337 available.append(name.replace(
'ALCARECOStream',
''))
1338 print(
"The following alcas could not be found "+
str(alcaList))
1339 print(
"available ",available)
1341 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1346 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1347 print(
"Loading lhe fragment from",loadFragment)
1348 __import__(loadFragment)
1349 self.process.
load(loadFragment)
1351 self._options.inlineObjets+=
','+stepSpec
1353 getattr(self.process,stepSpec).nEvents =
int(self._options.number)
1356 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1357 self.excludedPaths.
append(
"lhe_step")
1358 self.schedule.
append( self.process.lhe_step )
1361 """ load the fragment of generator configuration """ 1368 if not '/' in loadFragment:
1369 loadFragment=
'Configuration.Generator.'+loadFragment
1371 loadFragment=loadFragment.replace(
'/',
'.')
1373 print(
"Loading generator fragment from",loadFragment)
1374 __import__(loadFragment)
1379 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1382 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1384 generatorModule=sys.modules[loadFragment]
1385 genModules=generatorModule.__dict__
1396 import FWCore.ParameterSet.Modules
as cmstypes
1397 for name
in genModules:
1398 theObject = getattr(generatorModule,name)
1399 if isinstance(theObject, cmstypes._Module):
1401 if theObject.type_()
in noConcurrentLumiGenerators:
1402 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1403 self.
_options.nConcurrentLumis =
"1" 1404 self.
_options.nConcurrentIOVs =
"1" 1405 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1406 self.
_options.inlineObjets+=
','+name
1408 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly' or stepSpec ==
'pgen_smear':
1409 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1411 elif 'generator' in genModules:
1414 """ Enrich the schedule with the rest of the generation step """ 1419 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1420 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1423 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1425 if self.
_options.scenario ==
'HeavyIons':
1426 if self.
_options.pileup==
'HiMixGEN':
1427 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1428 elif self.
_options.pileup==
'HiMixEmbGEN':
1429 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1431 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1433 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1437 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1443 """ Enrich the schedule with the summary of the filter step """ 1450 """ Enrich the schedule with the simulation step""" 1460 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1466 """ Enrich the schedule with the digitisation step""" 1470 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1472 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1473 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1475 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1476 if self.
_options.inputEventContent==
'':
1477 self.
_options.inputEventContent=
'REGEN' 1486 """ Enrich the schedule with the crossing frame writer step""" 1492 """ Enrich the schedule with the digitisation step""" 1498 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1500 elif self.
_options.pileup_input.startswith(
"filelist:"):
1503 theFiles=self.
_options.pileup_input.split(
',')
1505 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1520 """ Enrich the schedule with the L1 simulation step""" 1527 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1528 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1529 if stepSpec
in supported:
1530 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1531 if self.
_options.scenario ==
'HeavyIons':
1535 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1539 """ Enrich the schedule with the HLT simulation step""" 1541 print(
"no specification of the hlt menu has been given, should never happen")
1542 raise Exception(
'no HLT specifications provided')
1546 from Configuration.HLT.autoHLT
import autoHLT
1549 stepSpec = autoHLT[key]
1551 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1557 if self.
_options.scenario ==
'HeavyIons':
1558 optionsForHLT[
'type'] =
'HIon' 1560 optionsForHLT[
'type'] =
'GRun' 1561 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1562 if stepSpec ==
'run,fromSource':
1563 if hasattr(self.
process.source,
'firstRun'):
1564 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1565 elif hasattr(self.
process.source,
'setRunNumber'):
1566 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1568 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1570 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1575 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1581 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1584 if self.
process.schedule ==
None:
1585 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1592 if not hasattr(self.
process,
'HLTEndSequence'):
1593 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1598 seqReco,seqDigi=stepSpec.spli(
',')
1600 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1614 for filt
in allMetFilterPaths:
1618 ''' Enrich the schedule with L1 HW validation ''' 1620 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1624 ''' Enrich the schedule with L1 reconstruction ''' 1630 ''' Enrich the schedule with L1 reconstruction ''' 1636 ''' Enrich the schedule with a user defined filter sequence ''' 1638 filterConfig,filterSeq = stepSpec.split(
'.')
1639 filterConfig=self.
load(filterConfig)
1641 class PrintAllModules(
object):
1645 def enter(self,visitee):
1647 label=visitee.label()
1652 def leave(self,v):
pass 1654 expander=PrintAllModules()
1655 getattr(self.
process,filterSeq).visit( expander )
1656 self.
_options.inlineObjets+=
','+expander.inliner
1657 self.
_options.inlineObjets+=
','+filterSeq
1668 ''' Enrich the schedule with reconstruction ''' 1674 ''' Enrich the schedule with reconstruction ''' 1680 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1682 print(
"ERROR: this step is only implemented for FastSim")
1689 ''' Enrich the schedule with PAT ''' 1694 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1697 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1699 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1702 if len(self.
_options.customise_commands) > 1:
1703 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1704 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1705 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1706 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1713 ''' Enrich the schedule with PATGEN ''' 1717 raise Exception(
"PATGEN step can only run on MC")
1722 ''' Enrich the schedule with NANO ''' 1725 custom =
"nanoAOD_customizeData" if self.
_options.isData
else "nanoAOD_customizeMC" 1726 self.
_options.customisation_file.insert(0,
'.'.
join([_nanoCff,custom]))
1728 if len(self.
_options.customise_commands) > 1:
1729 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1730 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1733 ''' Enrich the schedule with NANOGEN ''' 1735 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1738 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1740 self.
_options.customisation_file_unsch.insert(0,
'.'.
join([_nanogenCff, custom]))
1742 self.
_options.customisation_file.insert(0,
'.'.
join([_nanogenCff, custom]))
1745 ''' Enrich the schedule with skimming fragments''' 1748 skimlist=sequence.split(
'+')
1750 from Configuration.Skimming.autoSkim
import autoSkim
1754 for skim
in skimConfig.__dict__:
1755 skimstream = getattr(skimConfig,skim)
1756 if isinstance(skimstream,cms.Path):
1759 if (
not isinstance(skimstream,cms.FilteredStream)):
1761 shortname = skim.replace(
'SKIMStream',
'')
1762 if (sequence==
"all"):
1764 elif (shortname
in skimlist):
1769 skimstreamDQM = cms.FilteredStream(
1770 responsible = skimstream.responsible,
1771 name = skimstream.name+
'DQM',
1772 paths = skimstream.paths,
1773 selectEvents = skimstream.selectEvents,
1774 content = self.
_options.datatier+
'EventContent',
1775 dataTier = cms.untracked.string(self.
_options.datatier)
1778 for i
in range(skimlist.count(shortname)):
1779 skimlist.remove(shortname)
1783 if (skimlist.__len__()!=0
and sequence!=
"all"):
1784 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1785 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1788 ''' Enrich the schedule with a user defined sequence ''' 1794 """ Enrich the schedule with the postreco step """ 1801 print(f
"{stepSpec} in preparing validation")
1803 from Validation.Configuration.autoValidation
import autoValidation
1805 if sequence.find(
',')!=-1:
1806 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1807 valSeqName=sequence.split(
',')[1].
split(
'+')
1812 prevalSeqName=sequence.split(
'+')
1813 valSeqName=sequence.split(
'+')
1819 postfix=
'_'+sequence
1820 prevalSeqName=[
'prevalidation'+postfix]
1821 valSeqName=[
'validation'+postfix]
1822 if not hasattr(self.
process,valSeqName[0]):
1824 valSeqName=[sequence]
1836 for s
in valSeqName+prevalSeqName:
1839 for (i,s)
in enumerate(prevalSeqName):
1841 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1844 for (i,s)
in enumerate(valSeqName):
1845 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1852 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1853 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1860 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1862 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1864 for (i,s)
in enumerate(valSeqName):
1871 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1872 It will climb down within PSets, VPSets and VInputTags to find its target""" 1873 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1879 def doIt(self,pset,base):
1880 if isinstance(pset, cms._Parameterizable):
1881 for name
in pset.parameters_().
keys():
1887 value = getattr(pset,name)
1888 type = value.pythonTypeName()
1889 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1890 self.
doIt(value,base+
"."+name)
1891 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1892 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1893 elif type
in (
'cms.string',
'cms.untracked.string'):
1897 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1898 for (i,n)
in enumerate(value):
1899 if not isinstance(n, cms.InputTag):
1906 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1907 for (i,n)
in enumerate(value):
1910 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1913 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1915 def enter(self,visitee):
1918 label = visitee.label()
1919 except AttributeError:
1920 label =
'<Module not in a Process>' 1922 label =
'other execption' 1923 self.
doIt(visitee, label)
1925 def leave(self,visitee):
1930 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1933 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1936 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1944 if proc==HLTprocess:
return 1946 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1948 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1950 self.
additionalCommands.
append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1956 while '@' in repr(seqList)
and level<maxLevel:
1958 for specifiedCommand
in seqList:
1959 if specifiedCommand.startswith(
'@'):
1960 location=specifiedCommand[1:]
1961 if not location
in mapping:
1962 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1963 mappedTo=mapping[location]
1965 mappedTo=mappedTo[index]
1966 seqList.remove(specifiedCommand)
1967 seqList.extend(mappedTo.split(
'+'))
1970 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1978 sequenceList=_dqmSeq.split(
'+')
1979 postSequenceList=_dqmSeq.split(
'+')
1980 from DQMOffline.Configuration.autoDQM
import autoDQM
1984 if len(set(sequenceList))!=len(sequenceList):
1985 sequenceList=list(set(sequenceList))
1986 print(
"Duplicate entries for DQM:, using",sequenceList)
1988 pathName=
'dqmoffline_step' 1989 for (i,_sequence)
in enumerate(sequenceList):
1991 pathName=
'dqmoffline_%d_step'%(i)
1996 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
1999 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2004 pathName=
'dqmofflineOnPAT_step' 2005 for (i,_sequence)
in enumerate(postSequenceList):
2007 if (sequenceList[i]==postSequenceList[i]):
2010 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2012 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2016 """ Enrich the process with harvesting step """ 2017 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2023 harvestingList = sequence.split(
"+")
2024 from DQMOffline.Configuration.autoDQM
import autoDQM
2025 from Validation.Configuration.autoValidation
import autoValidation
2027 combined_mapping = copy.deepcopy( autoDQM )
2028 combined_mapping.update( autoValidation )
2029 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2031 if len(set(harvestingList))!=len(harvestingList):
2032 harvestingList=list(set(harvestingList))
2033 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2035 for name
in harvestingList:
2036 if not name
in harvestingConfig.__dict__:
2037 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2041 harvestingstream = getattr(harvestingConfig,name)
2042 if isinstance(harvestingstream,cms.Path):
2045 if isinstance(harvestingstream,cms.Sequence):
2046 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2053 """ Enrich the process with AlCaHarvesting step """ 2055 sequence=stepSpec.split(
".")[-1]
2058 harvestingList = sequence.split(
"+")
2062 from Configuration.AlCa.autoPCL
import autoPCL
2065 for name
in harvestingConfig.__dict__:
2066 harvestingstream = getattr(harvestingConfig,name)
2067 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2069 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2070 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2071 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2072 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2074 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2075 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2076 harvestingList.remove(name)
2078 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2081 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2082 print(
"The following harvesting could not be found : ", harvestingList)
2083 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2098 """ Add useful info for the production. """ 2099 self.
process.configurationMetadata=cms.untracked.PSet\
2100 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2101 name=cms.untracked.string(
"Applications"),
2102 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2110 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2112 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2121 from Configuration.StandardSequences.Eras
import eras
2122 for requestedEra
in self.
_options.era.split(
",") :
2123 modifierStrings.append(requestedEra)
2124 modifierImports.append(eras.pythonCfgLines[requestedEra])
2125 modifiers.append(getattr(eras,requestedEra))
2131 for c
in self.
_options.procModifiers:
2132 thingsImported.extend(c.split(
","))
2133 for pm
in thingsImported:
2134 modifierStrings.append(pm)
2135 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2136 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2142 if len(modifierStrings)>0:
2149 if len(modifiers)>0:
2157 def prepare(self, doChecking = False):
2158 """ Prepare the configuration string and add missing pieces.""" 2170 outputModuleCfgCode=
"" 2176 self.
pythonCfgCode +=
"# import of standard configurations\n" 2181 if not hasattr(self.
process,
"configurationMetadata"):
2203 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2204 tmpOut = cms.EndPath(output)
2205 setattr(self.
process,name+
'OutPath',tmpOut)
2214 for object
in self.
_options.inlineObjets.split(
','):
2217 if not hasattr(self.
process,object):
2218 print(
'cannot inline -'+object+
'- : not known')
2223 if self.
_options.pileup==
'HiMixEmbGEN':
2224 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2228 for path
in self.
process.paths:
2232 for endpath
in self.
process.endpaths:
2240 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2241 if self.
process.schedule ==
None:
2242 self.
process.schedule = cms.Schedule()
2244 self.
process.schedule.append(item)
2245 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2248 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2250 for index, item
in enumerate(self.
schedule):
2252 self.
process.schedule.insert(index, item)
2254 self.
process.schedule.append(item)
2256 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2258 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2265 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2266 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2270 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2273 overrideThreads = (self.
_options.nThreads !=
"1")
2274 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2275 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2277 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2285 if overrideConcurrentLumis:
2286 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks = "+self.
_options.nConcurrentLumis+
"\n" 2287 self.
process.options.numberOfConcurrentLuminosityBlocks =
int(self.
_options.nConcurrentLumis)
2288 if overrideConcurrentIOVs:
2289 self.
pythonCfgCode +=
"process.options.eventSetup.numberOfConcurrentIOVs = "+self.
_options.nConcurrentIOVs+
"\n" 2290 self.
process.options.eventSetup.numberOfConcurrentIOVs =
int(self.
_options.nConcurrentIOVs)
2292 if self.
_options.accelerators
is not None:
2293 accelerators = self.
_options.accelerators.split(
',')
2295 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2296 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2297 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2298 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2299 self.
process.options.accelerators = accelerators
2304 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2305 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2306 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2310 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2318 for path
in self.
process.paths:
2328 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2337 if hasattr(self.
process,
"logErrorHarvester"):
2339 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2340 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2341 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2342 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2349 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2350 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2351 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2353 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2356 imports = cms.specialImportRegistry.getSpecialImports()
2357 if len(imports) > 0:
2369 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2372 if hasattr(self.
process.source,
"fileNames"):
2373 if len(self.
process.source.fileNames.value()):
2374 ioJson[
'primary']=self.
process.source.fileNames.value()
2375 if hasattr(self.
process.source,
"secondaryFileNames"):
2376 if len(self.
process.source.secondaryFileNames.value()):
2377 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2378 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2379 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2381 ioJson[o]=om.fileName.value()
2382 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2386 io.write(json.dumps(ioJson))
2389 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule