from __future__ import print_function
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '1'
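# Hedged usage sketch (not part of the original file): the defaults above are plain
# attributes on the `defaultOptions` namespace object. A driver script would typically
# copy them, override a few fields, and hand them to ConfigBuilder (whose __init__
# signature appears further below). Names and values such as `myOptions` and the
# conditions string are illustrative only.
#
#   import copy
#   myOptions = copy.deepcopy(defaultOptions)
#   myOptions.step = 'GEN,SIM'
#   myOptions.conditions = 'auto:phase1_2018_realistic'   # example value
#   builder = ConfigBuilder(myOptions, with_output=True, with_input=True)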
theObject = getattr(process,name)
if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
    return "process."+name+" = " + theObject.dumpPython()
elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
    return "process."+name+" = " + theObject.dumpPython()+"\n"
return "process."+name+" = " + theObject.dumpPython()+"\n"
import FWCore.ParameterSet.Config as cms

for line in open(fileName,'r'):
    if line.count(".root")>=2:
        entries=line.replace("\n","").split()
        prim.append(entries[0])
        sec.append(entries[1])
    elif (line.find(".root")!=-1):
        entry=line.replace("\n","")

prim = sorted(list(set(prim)))
sec = sorted(list(set(sec)))
if not hasattr(s,"fileNames"):
    s.fileNames=cms.untracked.vstring(prim)
else:
    s.fileNames.extend(prim)
if not hasattr(s,"secondaryFileNames"):
    s.secondaryFileNames=cms.untracked.vstring(sec)
else:
    s.secondaryFileNames.extend(sec)
print("found files: ",prim)
if len(prim)==0:
    raise Exception("There are no files in input from the file list")
print("found parent files:",sec)
import FWCore.ParameterSet.Config as cms

print("the query is",query)

while eC!=0 and count<3:
    print('Sleeping, then retrying DAS')
    p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
    tupleP = os.waitpid(p.pid, 0)
if eC==0:
    print("DAS succeeded after",count,"attempts",eC)
else:
    print("DAS failed 3 times- I give up")

for line in pipe.split('\n'):
    if line.count(".root")>=2:
        entries=line.replace("\n","").split()
        prim.append(entries[0])
        sec.append(entries[1])
    elif (line.find(".root")!=-1):
        entry=line.replace("\n","")

prim = sorted(list(set(prim)))
sec = sorted(list(set(sec)))
if not hasattr(s,"fileNames"):
    s.fileNames=cms.untracked.vstring(prim)
else:
    s.fileNames.extend(prim)
if not hasattr(s,"secondaryFileNames"):
    s.secondaryFileNames=cms.untracked.vstring(sec)
else:
    s.secondaryFileNames.extend(sec)
print("found files: ",prim)
print("found parent files:",sec)
def anyOf(listOfKeys,dict,opt=None):
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
194 """The main building routines """
196 def __init__(self, options, process = None, with_output = False, with_input = False ):
197 """options taken from old cmsDriver and optparse """
199 options.outfile_name = options.dirout+options.fileout
203 if self.
_options.isData
and options.isMC:
204 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
    if (hasattr(self._options,"outputDefinition") and \
        self._options.outputDefinition != '' and \
        any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
       (hasattr(self._options,"datatier") and \
        self._options.datatier and \
        'DQMIO' in self._options.datatier):
        print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
    stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]

    for step in self._options.step.split(","):
        if step=='': continue
        stepParts = step.split(":")
        stepName = stepParts[0]
        if stepName not in stepList and not stepName.startswith('re'):
            raise ValueError("Step "+stepName+" unknown")
        if len(stepParts)==1:
            self.stepMap[stepName]=""
        elif len(stepParts)==2:
            self.stepMap[stepName]=stepParts[1].split('+')
        elif len(stepParts)==3:
            self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
        else:
            raise ValueError("Step definition "+step+" invalid")
    if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
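    # Illustrative example of the step parsing above (values are examples only):
    # a --step string such as
    #
    #   "GEN,SIM,RECO:localreco,ALCA:TkAlMinBias+SiStripCalZeroBias"
    #
    # is split on ',' into individual steps; each step is then split on ':' so that
    # e.g. 'RECO:localreco' stores the sequence spec for RECO in self.stepMap, and a
    # three-part form 'STEP:cff:a+b' stores (['a','b'], 'cff').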
"""Function to add the igprof profile service so that you can dump in the middle"""
profileOpts = self._options.profile.split(':')
profilerInterval = 100
profilerFormat = None
profilerJobFormat = None

startEvent = profileOpts.pop(0)
if not startEvent.isdigit():
    raise Exception("%s is not a number" % startEvent)
profilerStart = int(startEvent)

eventInterval = profileOpts.pop(0)
if not eventInterval.isdigit():
    raise Exception("%s is not a number" % eventInterval)
profilerInterval = int(eventInterval)

profilerFormat = profileOpts.pop(0)

if not profilerFormat:
    profilerFormat = "%s___%s___%%I.gz" % (
        self._options.evt_type.replace("_cfi",""),

if not profilerJobFormat and profilerFormat.endswith(".gz"):
    profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
elif not profilerJobFormat:
    profilerJobFormat = profilerFormat + "_EndOfJob.gz"

return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
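# Illustrative example of the --profile parsing above: a specification such as
#
#   --profile 10:100:myprofile.gz
#
# yields profilerStart=10, profilerInterval=100 and profilerFormat='myprofile.gz';
# the end-of-job file name is then derived as 'myprofile_EndOfJob.gz'. When the
# format field is empty it is built from the event type name instead.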
includeFile = includeFile.replace('/','.')
return sys.modules[includeFile]

"""helper routine to load and memorize imports"""
includeFile = includeFile.replace('/','.')
return sys.modules[includeFile]

"""helper routine to remember replace statements"""
if not command.strip().startswith("#"):
    exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))

self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
self.process.options = cms.untracked.PSet( )

self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                         stats = cms.untracked.bool(True),
                                         enable = cms.untracked.bool(True),
                                         cacheHint = cms.untracked.string("lazy-download"),
                                         readHint = cms.untracked.string("read-ahead-buffered")

self.process.IgProfService = cms.Service("IgProfService",
                                         reportFirstEvent = cms.untracked.int32(start),
                                         reportEventInterval = cms.untracked.int32(interval),
                                         reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                         reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
367 """Here we decide how many evts will be processed"""
368 self.
process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self.
_options.number)))
374 """Here the source is built. Priority: file, generator"""
377 def filesFromOption(self):
378 for entry
in self.
_options.filein.split(
','):
380 if entry.startswith(
"filelist:"):
382 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
387 if not hasattr(self.
process.source,
"secondaryFileNames"):
388 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
389 for entry
in self.
_options.secondfilein.split(
','):
391 if entry.startswith(
"filelist:"):
393 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
396 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
400 self.
process.source=cms.Source(
"PoolSource",
401 fileNames = cms.untracked.vstring(),
402 secondaryFileNames= cms.untracked.vstring())
403 filesFromOption(self)
404 elif self.
_options.filetype ==
"DAT":
405 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
406 filesFromOption(self)
407 elif self.
_options.filetype ==
"LHE":
408 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
409 if self.
_options.filein.startswith(
"lhe:"):
411 args=self.
_options.filein.split(
':')
413 print(
'LHE input from article ',article)
414 location=
'/store/lhe/'
416 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
417 for line
in textOfFiles:
418 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
419 self.
process.source.fileNames.append(location+article+
'/'+fileName)
422 print(
'Issue to load LHE files, please check and try again.')
425 if len(self.
process.source.fileNames)==0:
426 print(
'Issue with empty filename, but can pass line check')
429 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
431 filesFromOption(self)
433 elif self.
_options.filetype ==
"DQM":
434 self.
process.source=cms.Source(
"DQMRootSource",
435 fileNames = cms.untracked.vstring())
436 filesFromOption(self)
438 elif self.
_options.filetype ==
"DQMDAQ":
440 self.
process.source=cms.Source(
"DQMStreamerReader")
444 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
447 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
451 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
456 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,'
458 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,'
461 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
462 for command
in self.
_options.inputCommands.split(
','):
464 command = command.strip()
465 if command==
'':
continue
466 self.
process.source.inputCommands.append(command)
467 if not self.
_options.dropDescendant:
468 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
471 import FWCore.PythonUtilities.LumiList
as LumiList
475 if self.
process.source
is None:
476 self.
process.source=cms.Source(
"EmptySource")
482 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
483 if self.
_options.runsAndWeightsForMC:
486 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
487 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
488 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
489 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
494 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
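# Hedged summary of the source construction above (kept as a comment so the fragment
# stays valid Python): --filetype selects the cms.Source type (EDM -> PoolSource,
# DAT -> NewEventStreamFileReader, LHE -> LHESource, DQM -> DQMRootSource,
# DQMDAQ -> DQMStreamerReader), while each --filein entry is routed by prefix:
# 'filelist:<file>' goes through the file-list reader, 'das:<dataset>' or
# 'dbs:<dataset>' through the DAS query helper, and 'lhe:<article>' pulls files
# from /store/lhe/ via cmsLHEtoEOSManager.py.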
502 """ Add output module to the process """
506 print(
"--datatier & --eventcontent options ignored")
509 outList = eval(self.
_options.outputDefinition)
510 for (id,outDefDict)
in enumerate(outList):
511 outDefDictStr=outDefDict.__str__()
512 if not isinstance(outDefDict,dict):
513 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
515 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
518 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
519 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
520 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
521 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
522 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
524 if not theModuleLabel:
525 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
526 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
527 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output'
529 for name
in tryNames:
530 if not hasattr(self.
process,name):
533 if not theModuleLabel:
534 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
536 defaultFileName=self.
_options.outfile_name
538 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
540 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
541 if not theFileName.endswith(
'.root'):
545 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
546 if theStreamType==
'DQMIO': theStreamType=
'DQM'
547 if theStreamType==
'ALL':
548 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
550 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
554 if theStreamType==
'ALCARECO' and not theFilterName:
555 theFilterName=
'StreamALCACombined'
558 CppType=
'PoolOutputModule'
560 CppType=
'TimeoutPoolOutputModule'
561 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule'
562 output = cms.OutputModule(CppType,
563 theEventContent.clone(),
564 fileName = cms.untracked.string(theFileName),
565 dataset = cms.untracked.PSet(
566 dataTier = cms.untracked.string(theTier),
567 filterName = cms.untracked.string(theFilterName))
569 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
570 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
571 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
572 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
574 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
577 if not hasattr(output,
'SelectEvents'):
578 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
580 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
583 if hasattr(self.
process,theModuleLabel):
584 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
586 setattr(self.
process,theModuleLabel,output)
587 outputModule=getattr(self.
process,theModuleLabel)
588 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
589 path=getattr(self.
process,theModuleLabel+
'_step')
592 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
593 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
594 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
595 if theExtraOutputCommands:
596 if not isinstance(theExtraOutputCommands,list):
597 raise Exception(
"extra ouput command in --option must be a list of strings")
598 if hasattr(self.
process,theStreamType+
"EventContent"):
599 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
601 outputModule.outputCommands.extend(theExtraOutputCommands)
603 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
streamTypes=self._options.eventcontent.split(',')
tiers=self._options.datatier.split(',')
if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
    raise Exception("number of event content arguments does not match number of datatier arguments")

for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
    if streamType=='': continue
    if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
    if streamType=='DQMIO': streamType='DQM'
    eventContent=streamType
    if streamType == "NANOEDMAOD" :
        eventContent = "NANOAOD"
    elif streamType == "NANOEDMAODSIM" :
        eventContent = "NANOAODSIM"
    theEventContent = getattr(self.process, eventContent+"EventContent")

    theFileName=self._options.outfile_name
    theFilterName=self._options.filtername
    theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
    theFilterName=self._options.filtername

    CppType='PoolOutputModule'
    CppType='TimeoutPoolOutputModule'
    if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
    if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
    output = cms.OutputModule(CppType,
                              fileName = cms.untracked.string(theFileName),
                              dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                           filterName = cms.untracked.string(theFilterName)

    if hasattr(self.process,"generation_step") and streamType!='LHE':
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
    if hasattr(self.process,"filtering_step"):
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

    if streamType=='ALCARECO':
        output.dataset.filterName = cms.untracked.string('StreamALCACombined')

    if "MINIAOD" in streamType:
        from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

    outputModuleName=streamType+'output'
    setattr(self.process,outputModuleName,output)
    outputModule=getattr(self.process,outputModuleName)
    setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
    path=getattr(self.process,outputModuleName+'_step')

    if self._options.outputCommands and streamType!='DQM':
        for evct in self._options.outputCommands.split(','):
            if not evct: continue
            self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

    if not self._options.inlineEventContent:
        tmpstreamType=streamType
        if "NANOEDM" in tmpstreamType :
            tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
        def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
        outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

    result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
"""Add selected standard sequences to the process"""

pileupSpec=self._options.pileup.split(',')[0]

from Configuration.StandardSequences.Mixing import Mixing,defineMixing
if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
    message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

if '.' in pileupSpec:
    mixingDict={'file':pileupSpec}
elif pileupSpec.startswith('file:'):
    mixingDict={'file':pileupSpec[5:]}
else:
    mixingDict=copy.copy(Mixing[pileupSpec])
if len(self._options.pileup.split(','))>1:
    mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

if 'file:' in pileupSpec:
    print("inlining mixing module configuration")
mixingDict.pop('file')

if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
elif self._options.pileup_input.startswith("filelist:"):
else:
    mixingDict['F']=self._options.pileup_input.split(',')
for command in specialization:

if len(mixingDict)!=0:
    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
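# Illustrative example of the pile-up specification parsed above: --pileup takes either
# a named scenario from Configuration.StandardSequences.Mixing, optionally followed by
# a dict of overrides, or a direct file/cff reference, e.g. (values are examples only)
#
#   --pileup 'AVE_35_BX_25ns,{"B":(-3,3)}'     # named scenario plus override dict
#   --pileup 'file:myMixingModule_cfi'         # inline a private mixing configuration
#
# Any leftover keys in mixingDict trigger the "unused mixing specification" error.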
stepSpec = self.stepMap[stepName]
print("Step:", stepName, "Spec:",stepSpec)
if stepName.startswith('re'):
    if stepName[2:] not in self._options.donotDropOnInput:
        self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
    stepName=stepName[2:]
if stepSpec=="":
    getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
elif isinstance(stepSpec, list):
    getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
elif isinstance(stepSpec, tuple):
    getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
else:
    raise ValueError("Invalid step definition")

if self._options.restoreRNDSeeds!=False:
    if self._options.restoreRNDSeeds==True:
        self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
    else:
        self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

def dropSecondDropStar(iec):

if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
for evct in self._options.inputEventContent.split(','):
    if evct=='': continue
    theEventContent = getattr(self.process, evct+"EventContent")
    if hasattr(theEventContent,'outputCommands'):
        self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
    if hasattr(theEventContent,'inputCommands'):
        self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

dropSecondDropStar(self.process.source.inputCommands)

if not self._options.dropDescendant:
    self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
808 """Add conditions to the process"""
809 if not self.
_options.conditions:
return
811 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
812 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
813 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
823 """Include the customise code """
827 for c
in self.
_options.customisation_file:
828 custOpt.extend(c.split(
","))
830 for c
in self.
_options.customisation_file_unsch:
831 custOpt.extend(c.split(
","))
837 raise Exception(
"more than . in the specification:"+opt)
838 fileName=opt.split(
'.')[0]
839 if opt.count(
'.')==0: rest=
'customise'
841 rest=opt.split(
'.')[1]
842 if rest==
'py': rest=
'customise'
844 if fileName
in custMap:
845 custMap[fileName].extend(rest.split(
'+'))
847 custMap[fileName]=rest.split(
'+')
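# Illustrative example of the customisation spec parsed above: each --customise entry
# has the form 'package/module_file.function1+function2' (or just the file, in which
# case the function defaults to 'customise'), e.g.
#
#   --customise Configuration/DataProcessing/Utils.addMonitoring
#
# builds custMap = {'Configuration/DataProcessing/Utils': ['addMonitoring']}.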
final_snippet='\n# customisation of the process.\n'

allFcn.extend(custMap[opt])
if allFcn.count(fcn)!=1:
    raise Exception("cannot specify twice "+fcn+" as a customisation method")

packageName = f.replace(".py","").replace("/",".")
__import__(packageName)
package = sys.modules[packageName]

customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
for line in file(customiseFile,'r'):
    if "import FWCore.ParameterSet.Config" in line:
        continue
    final_snippet += line
final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
for fcn in custMap[f]:
    print("customising the process with",fcn,"from",f)
    if not hasattr(package,fcn):
        raise Exception("config "+f+" has no function "+fcn)
    final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
    final_snippet += "\nprocess = %s(process)\n"%(fcn,)

final_snippet += '\n# End of customisation functions\n'

final_snippet='\n# Customisation from command line\n'
if self._options.customise_commands:
    for com in self._options.customise_commands.split('\\n'):
        final_snippet +='\n'+com

if self._options.particleTable not in defaultOptions.particleTableList:
    print('Invalid particle table provided. Options are:')
    print(defaultOptions.particleTable)
self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
self._options.beamspot=VtxSmearedDefaultKey

self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"

if self._options.scenario=='cosmics':
    self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
    self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"

if self._options.scenario=='HeavyIons':
    self._options.beamspot=VtxSmearedHIDefaultKey
    self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
    self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"

if self._options.magField==defaultOptions.magField:
    print("magnetic field option forced to: AutoFromDBCurrent")
self._options.magField='AutoFromDBCurrent'
self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'

if 'start' in self._options.conditions.lower():
    self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'

def inGeometryKeys(opt):
    from Configuration.StandardSequences.GeometryConf import GeometryConf
    if opt in GeometryConf:
        return GeometryConf[opt]

geoms=self._options.geometry.split(',')
if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
if '/' in geoms[1] or '_cff' in geoms[1]:
self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

if (geoms[0].startswith('DB:')):
    self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
if '/' in geoms[0] or '_cff' in geoms[0]:
simGeometry=geoms[0]
self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

if simGeometry not in defaultOptions.geometryExtendedOptions:
    self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"
self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"

self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

if self._options.pileup=='default':
    from Configuration.StandardSequences.Mixing import MixingDefaultKey
    self._options.pileup=MixingDefaultKey
output = cms.OutputModule("PoolOutputModule")
if stream.selectEvents.parameters_().__len__()!=0:
    output.SelectEvents = stream.selectEvents
else:
    output.SelectEvents = cms.untracked.PSet()
    output.SelectEvents.SelectEvents=cms.vstring()
    if isinstance(stream.paths,tuple):
        for path in stream.paths:
            output.SelectEvents.SelectEvents.append(path.label())
    else:
        output.SelectEvents.SelectEvents.append(stream.paths.label())

if isinstance(stream.content,str):
    evtPset=getattr(self.process,stream.content)
    for p in evtPset.parameters_():
        setattr(output,p,getattr(evtPset,p))
    if not self._options.inlineEventContent:
        def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
        output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
else:
    output.outputCommands = stream.content

output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                     filterName = cms.untracked.string(stream.name))

if self._options.filtername:
    output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

if workflow in ("producers,full"):
    if isinstance(stream.paths,tuple):
        for path in stream.paths:
            self.schedule.append(path)
    else:
        self.schedule.append(stream.paths)

if (not self._options.relval) and workflow in ("full","output"):
    self.additionalOutputs[name] = output
    setattr(self.process,name,output)

if workflow == 'output':
    filterList = output.SelectEvents.SelectEvents
    for i, filter in enumerate(filterList):
        filterList[i] = filter+":"+self._options.triggerResultsProcess
if ( len(sequence.split('.'))==1 ):
elif ( len(sequence.split('.'))==2 ):
    sequence=sequence.split('.')[1]
else:
    print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
    print(sequence,"not recognized")

for i,s in enumerate(seq.split('*')):
    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
    p=getattr(self.process,prefix)
    tmp = getattr(self.process, s)
    if isinstance(tmp, cms.Task):

setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))

for i,s in enumerate(seq.split('+')):
    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
def prepare_ALCA(self, sequence = None, workflow = 'full'):
    """ Enrich the process with alca streams """
    sequence = sequence.split('.')[-1]
    alcaList = sequence.split("+")
    from Configuration.AlCa.autoAlca import autoAlca

    for name in alcaConfig.__dict__:
        alcastream = getattr(alcaConfig,name)
        shortName = name.replace('ALCARECOStream','')
        if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
            output = self.addExtraStream(name,alcastream, workflow = workflow)
            self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
            if 'DQM' in alcaList:
                if not self._options.inlineEventContent and hasattr(self.process,name):
                    self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                else:
                    output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

            if isinstance(alcastream.paths,tuple):
                for path in alcastream.paths:

            for i in range(alcaList.count(shortName)):
                alcaList.remove(shortName)

        elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
            path = getattr(alcaConfig,name)
            alcaList.remove('DQM')

        if isinstance(alcastream,cms.Path):

    if len(alcaList) != 0:
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            if isinstance(alcastream,cms.FilteredStream):
                available.append(name.replace('ALCARECOStream',''))
        print("The following alcas could not be found "+str(alcaList))
        print("available ",available)
        raise Exception("The following alcas could not be found "+str(alcaList))
loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
print("Loading lhe fragment from",loadFragment)
__import__(loadFragment)
self.process.load(loadFragment)

self._options.inlineObjets+=','+sequence

getattr(self.process,sequence).nEvents = int(self._options.number)

self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
self.excludedPaths.append("lhe_step")
self.schedule.append( self.process.lhe_step )
1319 """ load the fragment of generator configuration """
1326 if not '/' in loadFragment:
1327 loadFragment=
'Configuration.Generator.'+loadFragment
1329 loadFragment=loadFragment.replace(
'/',
'.')
1331 print(
"Loading generator fragment from",loadFragment)
1332 __import__(loadFragment)
1337 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1340 generatorModule=sys.modules[loadFragment]
1341 genModules=generatorModule.__dict__
1352 import FWCore.ParameterSet.Modules
as cmstypes
1353 for name
in genModules:
1354 theObject = getattr(generatorModule,name)
1355 if isinstance(theObject, cmstypes._Module):
1357 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1358 self.
_options.inlineObjets+=
','+name
1360 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1361 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1363 elif 'generator' in genModules:
1366 """ Enrich the schedule with the rest of the generation step """
1368 genSeqName=sequence.split(
'.')[-1]
1372 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1373 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1376 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1378 if self.
_options.scenario ==
'HeavyIons':
1379 if self.
_options.pileup==
'HiMixGEN':
1380 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1382 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1384 self.
process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1388 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1394 """ Enrich the schedule with the summary of the filter step """
1401 """ Enrich the schedule with the simulation step"""
1411 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1417 """ Enrich the schedule with the digitisation step"""
1421 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1423 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1424 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1426 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1427 if self.
_options.inputEventContent==
'':
1428 self.
_options.inputEventContent=
'REGEN'
1437 """ Enrich the schedule with the crossing frame writer step"""
1443 """ Enrich the schedule with the digitisation step"""
1449 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1451 elif self.
_options.pileup_input.startswith(
"filelist:"):
1454 theFiles=self.
_options.pileup_input.split(
',')
1456 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1471 """ Enrich the schedule with the L1 simulation step"""
1478 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1479 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1480 if sequence
in supported:
1481 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1482 if self.
_options.scenario ==
'HeavyIons':
1486 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1491 """ Enrich the schedule with the HLT simulation step"""
1493 print(
"no specification of the hlt menu has been given, should never happen")
1494 raise Exception(
'no HLT sequence provided')
1498 from Configuration.HLT.autoHLT
import autoHLT
1501 sequence = autoHLT[key]
1503 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1509 if self.
_options.scenario ==
'HeavyIons':
1510 optionsForHLT[
'type'] =
'HIon'
1512 optionsForHLT[
'type'] =
'GRun'
1513 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1514 if sequence ==
'run,fromSource':
1515 if hasattr(self.
process.source,
'firstRun'):
1516 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1517 elif hasattr(self.
process.source,
'setRunNumber'):
1518 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1520 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1522 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1527 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1533 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1541 if not hasattr(self.
process,
'HLTEndSequence'):
1542 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
seqReco=sequence.split(',')[1]
seqDigi=sequence.split(',')[0]
print("RAW2RECO requires two specifications",sequence,"insufficient")

for filt in allMetFilterPaths:

''' Enrich the schedule with L1 HW validation '''
print('\n\n\n DEPRECATED this has no action \n\n\n')

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with a user defined filter sequence '''
filterConfig=self.load(sequence.split('.')[0])
filterSeq=sequence.split('.')[-1]

class PrintAllModules(object):
    def enter(self,visitee):
        label=visitee.label()
    def leave(self,v): pass

expander=PrintAllModules()
getattr(self.process,filterSeq).visit( expander )
self._options.inlineObjets+=','+expander.inliner
self._options.inlineObjets+=','+filterSeq
''' Enrich the schedule with reconstruction '''

''' Enrich the schedule with reconstruction '''

''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
print("ERROR: this step is only implemented for FastSim")
self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

''' Enrich the schedule with PAT '''
if not self._options.runUnscheduled:
    raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")

self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

if len(self._options.customise_commands) > 1:
    self._options.customise_commands = self._options.customise_commands + " \n"
self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
''' Enrich the schedule with PATGEN '''
if not self._options.runUnscheduled:
    raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
raise Exception("PATGEN step can only run on MC")

''' Enrich the schedule with NANO '''
custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
if len(self._options.customise_commands) > 1:
    self._options.customise_commands = self._options.customise_commands + " \n"
self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

''' Enrich the schedule with event interpretation '''
from Configuration.StandardSequences.EventInterpretation import EventInterpretation
if sequence in EventInterpretation:
    sequence = 'EIsequence'
else:
    raise Exception('Cannot set %s event interpretation'%( sequence) )
''' Enrich the schedule with skimming fragments'''
sequence = sequence.split('.')[-1]
skimlist=sequence.split('+')

from Configuration.Skimming.autoSkim import autoSkim

for skim in skimConfig.__dict__:
    skimstream = getattr(skimConfig,skim)
    if isinstance(skimstream,cms.Path):
    if (not isinstance(skimstream,cms.FilteredStream)): continue

    shortname = skim.replace('SKIMStream','')
    if (sequence=="all"):
    elif (shortname in skimlist):

        skimstreamDQM = cms.FilteredStream(
            responsible = skimstream.responsible,
            name = skimstream.name+'DQM',
            paths = skimstream.paths,
            selectEvents = skimstream.selectEvents,
            content = self._options.datatier+'EventContent',
            dataTier = cms.untracked.string(self._options.datatier)

        for i in range(skimlist.count(shortname)):
            skimlist.remove(shortname)

if (skimlist.__len__()!=0 and sequence!="all"):
    print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
    raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
''' Enrich the schedule with a user defined sequence '''

""" Enrich the schedule with the postreco step """

print(sequence,"in preparing validation")
from Validation.Configuration.autoValidation import autoValidation
sequence=sequence.split('.')[-1]
if sequence.find(',')!=-1:
    prevalSeqName=sequence.split(',')[0].split('+')
    valSeqName=sequence.split(',')[1].split('+')
else:
    prevalSeqName=sequence.split('+')
    valSeqName=sequence.split('+')

postfix='_'+sequence
prevalSeqName=['prevalidation'+postfix]
valSeqName=['validation'+postfix]
if not hasattr(self.process,valSeqName[0]):
    valSeqName=[sequence]

for s in valSeqName+prevalSeqName:

for (i,s) in enumerate(prevalSeqName):
    setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
for (i,s) in enumerate(valSeqName):
    setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))

if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
    if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:

self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
    for (i,s) in enumerate(valSeqName):
1833 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1834 It will climb down within PSets, VPSets and VInputTags to find its target"""
1835 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1841 def doIt(self,pset,base):
1842 if isinstance(pset, cms._Parameterizable):
1843 for name
in pset.parameters_().
keys():
1849 value = getattr(pset,name)
1850 type = value.pythonTypeName()
1851 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1852 self.
doIt(value,base+
"."+name)
1853 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1854 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1855 elif type
in (
'cms.string',
'cms.untracked.string'):
1859 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1860 for (i,n)
in enumerate(value):
1861 if not isinstance(n, cms.InputTag):
1868 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1869 for (i,n)
in enumerate(value):
1872 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1875 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1877 def enter(self,visitee):
1880 label = visitee.label()
1881 except AttributeError:
1882 label =
'<Module not in a Process>'
1884 label =
'other execption'
1885 self.
doIt(visitee, label)
1887 def leave(self,visitee):
print("Replacing all InputTag %s => %s"%(oldT,newT))

loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))

if proc==HLTprocess: return
print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))

if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
while '@' in repr(seqList) and level<maxLevel:
    for specifiedCommand in seqList:
        if specifiedCommand.startswith('@'):
            location=specifiedCommand[1:]
            if not location in mapping:
                raise Exception("Impossible to map "+location+" from "+repr(mapping))
            mappedTo=mapping[location]
            mappedTo=mappedTo[index]
            seqList.remove(specifiedCommand)
            seqList.extend(mappedTo.split('+'))

raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
sequenceList=sequence.split('.')[-1].split('+')
postSequenceList=sequence.split('.')[-1].split('+')
from DQMOffline.Configuration.autoDQM import autoDQM

if len(set(sequenceList))!=len(sequenceList):
    sequenceList=list(set(sequenceList))
    print("Duplicate entries for DQM, using",sequenceList)

pathName='dqmoffline_step'
for (i,sequence) in enumerate(sequenceList):
    pathName='dqmoffline_%d_step'%(i)
    setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )

    if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):

pathName='dqmofflineOnPAT_step'
for (i,sequence) in enumerate(postSequenceList):
    if (sequenceList[i]==postSequenceList[i]):
    pathName='dqmofflineOnPAT_%d_step'%(i)
    setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1978 """ Enrich the process with harvesting step """
1979 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff'
1983 sequence = sequence.split(
'.')[-1]
1986 harvestingList = sequence.split(
"+")
1987 from DQMOffline.Configuration.autoDQM
import autoDQM
1988 from Validation.Configuration.autoValidation
import autoValidation
1990 combined_mapping = copy.deepcopy( autoDQM )
1991 combined_mapping.update( autoValidation )
1992 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1994 if len(set(harvestingList))!=len(harvestingList):
1995 harvestingList=
list(set(harvestingList))
1996 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1998 for name
in harvestingList:
1999 if not name
in harvestingConfig.__dict__:
2000 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2004 harvestingstream = getattr(harvestingConfig,name)
2005 if isinstance(harvestingstream,cms.Path):
2008 if isinstance(harvestingstream,cms.Sequence):
2009 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2016 """ Enrich the process with AlCaHarvesting step """
2018 sequence=sequence.split(
".")[-1]
2021 harvestingList = sequence.split(
"+")
2025 from Configuration.AlCa.autoPCL
import autoPCL
2028 for name
in harvestingConfig.__dict__:
2029 harvestingstream = getattr(harvestingConfig,name)
2030 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2032 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2033 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2034 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2035 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2037 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2038 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2039 harvestingList.remove(name)
2041 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2044 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2045 print(
"The following harvesting could not be found : ", harvestingList)
2046 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2061 """ Add useful info for the production. """
2062 self.
process.configurationMetadata=cms.untracked.PSet\
2063 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2064 name=cms.untracked.string(
"Applications"),
2065 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2073 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n'
2075 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n"
2084 from Configuration.StandardSequences.Eras
import eras
2085 for requestedEra
in self.
_options.era.split(
",") :
2086 modifierStrings.append(requestedEra)
2087 modifierImports.append(eras.pythonCfgLines[requestedEra])
2088 modifiers.append(getattr(eras,requestedEra))
2094 for pm
in self.
_options.procModifiers.split(
','):
2095 modifierStrings.append(pm)
2096 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2097 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2103 if len(modifierStrings)>0:
2110 if len(modifiers)>0:
def prepare(self, doChecking = False):
    """ Prepare the configuration string and add missing pieces."""

    outputModuleCfgCode=""

    self.pythonCfgCode += "# import of standard configurations\n"

    if not hasattr(self.process,"configurationMetadata"):

    self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
    tmpOut = cms.EndPath(output)
    setattr(self.process,name+'OutPath',tmpOut)

    for object in self._options.inlineObjets.split(','):
        if not hasattr(self.process,object):
            print('cannot inline -'+object+'- : not known')

    for path in self.process.paths:
    for endpath in self.process.endpaths:

    result = "process.schedule = cms.Schedule("

    self.process.schedule = cms.Schedule()
    if not isinstance(item, cms.Schedule):
        self.process.schedule.append(item)
    else:
        self.process.schedule.extend(item)

    if hasattr(self.process,"HLTSchedule"):
        pathNames = ['process.'+p.label_() for p in beforeHLT]
        result += ','.join(pathNames)+')\n'
        result += 'process.schedule.extend(process.HLTSchedule)\n'
        pathNames = ['process.'+p.label_() for p in afterHLT]
        result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
    else:
        pathNames = ['process.'+p.label_() for p in self.schedule]
        result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
    self.process.schedule.associate(getattr(self.process, labelToAssociate))
    self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

    self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"

    if self._options.nThreads != "1":
        self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
        self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32("+self._options.nStreams+")\n"
        self.pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32("+self._options.nConcurrentLumis+")\n"
        self.pythonCfgCode += "if hasattr(process, 'DQMStore'): process.DQMStore.assertLegacySafe=cms.untracked.bool(False)\n"
        self.process.options.numberOfThreads=cms.untracked.uint32(int(self._options.nThreads))
        self.process.options.numberOfStreams=cms.untracked.uint32(int(self._options.nStreams))
        self.process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(int(self._options.nConcurrentLumis))

    self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
    self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
    MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
    self.pythonCfgCode += '# filter all path with the production filter sequence\n'
    for path in self.process.paths:

    self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
    self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
    self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
    from FWCore.ParameterSet.Utilities import convertToUnscheduled

    if hasattr(self.process,"logErrorHarvester"):
        self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
        self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
        self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
        from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands

    self.pythonCfgCode +="\n# Add early deletion of temporary data products to reduce peak memory need\n"
    self.pythonCfgCode +="from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
    self.pythonCfgCode +="process = customiseEarlyDelete(process)\n"
    from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
    imports = cms.specialImportRegistry.getSpecialImports()
    if len(imports) > 0:

    if not self._options.io.endswith('.io'): self._options.io+='.io'

    if hasattr(self.process.source,"fileNames"):
        if len(self.process.source.fileNames.value()):
            ioJson['primary']=self.process.source.fileNames.value()
    if hasattr(self.process.source,"secondaryFileNames"):
        if len(self.process.source.secondaryFileNames.value()):
            ioJson['secondary']=self.process.source.secondaryFileNames.value()
    if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
        ioJson['pileup']=self._options.pileup_input[4:]
    ioJson[o]=om.fileName.value()
    ioJson['GT']=self.process.GlobalTag.globaltag.value()

    io.write(json.dumps(ioJson))