from __future__ import print_function
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '1'
theObject = getattr(process,name)
if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
    return "process."+name+" = " + theObject.dumpPython()
elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
    return "process."+name+" = " + theObject.dumpPython()+"\n"
return "process."+name+" = " + theObject.dumpPython()+"\n"
import FWCore.ParameterSet.Config as cms

for line in open(fileName,'r'):
    if line.count(".root")>=2:
        entries=line.replace("\n","").split()
        prim.append(entries[0])
        sec.append(entries[1])
    elif (line.find(".root")!=-1):
        entry=line.replace("\n","")

prim = sorted(list(set(prim)))
sec = sorted(list(set(sec)))
if not hasattr(s,"fileNames"):
    s.fileNames=cms.untracked.vstring(prim)
s.fileNames.extend(prim)
if not hasattr(s,"secondaryFileNames"):
    s.secondaryFileNames=cms.untracked.vstring(sec)
s.secondaryFileNames.extend(sec)
print("found files: ",prim)
raise Exception("There are no files in input from the file list")
print("found parent files:",sec)
import FWCore.ParameterSet.Config as cms

print("the query is",query)
while eC!=0 and count<3:
    print('Sleeping, then retrying DAS')
    p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
    tupleP = os.waitpid(p.pid, 0)
print("DAS succeeded after",count,"attempts",eC)
print("DAS failed 3 times- I give up")

for line in pipe.split('\n'):
    if line.count(".root")>=2:
        entries=line.replace("\n","").split()
        prim.append(entries[0])
        sec.append(entries[1])
    elif (line.find(".root")!=-1):
        entry=line.replace("\n","")

prim = sorted(list(set(prim)))
sec = sorted(list(set(sec)))
if not hasattr(s,"fileNames"):
    s.fileNames=cms.untracked.vstring(prim)
s.fileNames.extend(prim)
if not hasattr(s,"secondaryFileNames"):
    s.secondaryFileNames=cms.untracked.vstring(sec)
s.secondaryFileNames.extend(sec)
print("found files: ",prim)
print("found parent files:",sec)
def anyOf(listOfKeys,dict,opt=None):
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
194 """The main building routines """
def __init__(self, options, process = None, with_output = False, with_input = False ):
    """options taken from old cmsDriver and optparse """

    options.outfile_name = options.dirout+options.fileout

    if self._options.isData and options.isMC:
        raise Exception("ERROR: You may specify only --data or --mc, not both")
if (hasattr(self._options,"outputDefinition") and \
    self._options.outputDefinition != '' and \
    any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
   (hasattr(self._options,"datatier") and \
    print("removing ENDJOB from steps since not compatible with DQMIO dataTier")
stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
for step in self._options.step.split(","):
    if step=='': continue
    stepParts = step.split(":")
    stepName = stepParts[0]
    if stepName not in stepList and not stepName.startswith('re'):
        raise ValueError("Step "+stepName+" unknown")
    if len(stepParts)==1:
    elif len(stepParts)==2:
    elif len(stepParts)==3:
        self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
    raise ValueError("Step definition "+step+" invalid")

if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
Function to add the igprof profile service so that you can dump in the middle

profileOpts = self._options.profile.split(':')
profilerInterval = 100
profilerFormat = None
profilerJobFormat = None

startEvent = profileOpts.pop(0)
if not startEvent.isdigit():
    raise Exception("%s is not a number" % startEvent)
profilerStart = int(startEvent)

eventInterval = profileOpts.pop(0)
if not eventInterval.isdigit():
    raise Exception("%s is not a number" % eventInterval)
profilerInterval = int(eventInterval)

profilerFormat = profileOpts.pop(0)

if not profilerFormat:
    profilerFormat = "%s___%s___%%I.gz" % (
        self._options.evt_type.replace("_cfi",""),

if not profilerJobFormat and profilerFormat.endswith(".gz"):
    profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
elif not profilerJobFormat:
    profilerJobFormat = profilerFormat + "_EndOfJob.gz"

return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
includeFile = includeFile.replace('/','.')
return sys.modules[includeFile]

"""helper routine to load and memorize imports"""
includeFile = includeFile.replace('/','.')
return sys.modules[includeFile]

"""helper routine to remember replace statements"""
if not command.strip().startswith("#"):
    exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
self.process.options = cms.untracked.PSet( )

self.process.AdaptorConfig = cms.Service("AdaptorConfig",
    stats = cms.untracked.bool(True),
    enable = cms.untracked.bool(True),
    cacheHint = cms.untracked.string("lazy-download"),
    readHint = cms.untracked.string("read-ahead-buffered")

self.process.IgProfService = cms.Service("IgProfService",
    reportFirstEvent = cms.untracked.int32(start),
    reportEventInterval = cms.untracked.int32(interval),
    reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
    reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))

"""Here we decide how many evts will be processed"""
self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
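# The value is taken from the -n/--number option; the default of -1 set in defaultOptions above
# means "no limit", i.e. process all input events (usual cmsRun convention).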
374 """Here the source is built. Priority: file, generator"""
def filesFromOption(self):
    for entry in self._options.filein.split(','):
        if entry.startswith("filelist:"):
        elif entry.startswith("dbs:") or entry.startswith("das:"):

    if not hasattr(self.process.source,"secondaryFileNames"):
        raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
    for entry in self._options.secondfilein.split(','):
        if entry.startswith("filelist:"):
        elif entry.startswith("dbs:") or entry.startswith("das:"):
        self.process.source.secondaryFileNames.append(self._options.dirin+entry)

self.process.source=cms.Source("PoolSource",
    fileNames = cms.untracked.vstring(),
    secondaryFileNames= cms.untracked.vstring())
filesFromOption(self)
elif self._options.filetype == "DAT":
    self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
    filesFromOption(self)
elif self._options.filetype == "LHE":
    self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
    if self._options.filein.startswith("lhe:"):
        args=self._options.filein.split(':')
        print('LHE input from article ',article)
        location='/store/lhe/'
        textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
        for line in textOfFiles:
            for fileName in [x for x in line.split() if '.lhe' in x]:
                self.process.source.fileNames.append(location+article+'/'+fileName)
        print('Issue to load LHE files, please check and try again.')
        if len(self.process.source.fileNames)==0:
            print('Issue with empty filename, but can pass line check')
        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
    filesFromOption(self)
elif self._options.filetype == "DQM":
    self.process.source=cms.Source("DQMRootSource",
        fileNames = cms.untracked.vstring())
    filesFromOption(self)
elif self._options.filetype == "DQMDAQ":
    self.process.source=cms.Source("DQMStreamerReader")

self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
for command in self._options.inputCommands.split(','):
    command = command.strip()
    if command=='': continue
    self.process.source.inputCommands.append(command)
if not self._options.dropDescendant:
    self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

import FWCore.PythonUtilities.LumiList as LumiList

if self.process.source is None:
    self.process.source=cms.Source("EmptySource")
raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
if self._options.runsAndWeightsForMC:

from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
    __import__(RunsAndWeights[self._options.runsScenarioForMC])
    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution

import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun

""" Add output module to the process """
print("--datatier & --eventcontent options ignored")

outList = eval(self._options.outputDefinition)
for (id,outDefDict) in enumerate(outList):
    outDefDictStr=outDefDict.__str__()
    if not isinstance(outDefDict,dict):
        raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")

    theTier=anyOf(['t','tier','dataTier'],outDefDict)
    theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
    theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
    theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
    theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
    theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')

    if not theModuleLabel:
        tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                  theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
        for name in tryNames:
            if not hasattr(self.process,name):
    if not theModuleLabel:
        raise Exception("cannot find a module label for specification: "+outDefDictStr)

    defaultFileName=self._options.outfile_name
    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

    theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
    if not theFileName.endswith('.root'):

    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
    if theStreamType=='DQMIO': theStreamType='DQM'
    if theStreamType=='ALL':
        theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
    theEventContent = getattr(self.process, theStreamType+"EventContent")

    if theStreamType=='ALCARECO' and not theFilterName:
        theFilterName='StreamALCACombined'

    CppType='PoolOutputModule'
    CppType='TimeoutPoolOutputModule'
    if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
    output = cms.OutputModule(CppType,
        theEventContent.clone(),
        fileName = cms.untracked.string(theFileName),
        dataset = cms.untracked.PSet(
            dataTier = cms.untracked.string(theTier),
            filterName = cms.untracked.string(theFilterName))
    if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
    if not theSelectEvent and hasattr(self.process,'filtering_step'):
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

    if not hasattr(output,'SelectEvents'):
        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)

    if hasattr(self.process,theModuleLabel):
        raise Exception("the current process already has a module "+theModuleLabel+" defined")

    setattr(self.process,theModuleLabel,output)
    outputModule=getattr(self.process,theModuleLabel)
    setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
    path=getattr(self.process,theModuleLabel+'_step')

    if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
        def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"): return label
        outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
    if theExtraOutputCommands:
        if not isinstance(theExtraOutputCommands,list):
            raise Exception("extra output command in --option must be a list of strings")
        if hasattr(self.process,theStreamType+"EventContent"):
            self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
        outputModule.outputCommands.extend(theExtraOutputCommands)

    result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
streamTypes=self._options.eventcontent.split(',')
tiers=self._options.datatier.split(',')
if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
    raise Exception("number of event content arguments does not match number of datatier arguments")

for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
    if streamType=='': continue
    if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
    if streamType=='DQMIO': streamType='DQM'
    eventContent=streamType
    if streamType == "NANOEDMAOD" :
        eventContent = "NANOAOD"
    elif streamType == "NANOEDMAODSIM" :
        eventContent = "NANOAODSIM"
    theEventContent = getattr(self.process, eventContent+"EventContent")

    theFileName=self._options.outfile_name
    theFilterName=self._options.filtername
    theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
    theFilterName=self._options.filtername
    CppType='PoolOutputModule'
    CppType='TimeoutPoolOutputModule'
    if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
    if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
    output = cms.OutputModule(CppType,
        fileName = cms.untracked.string(theFileName),
        dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
            filterName = cms.untracked.string(theFilterName)
    if hasattr(self.process,"generation_step") and streamType!='LHE':
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
    if hasattr(self.process,"filtering_step"):
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

    if streamType=='ALCARECO':
        output.dataset.filterName = cms.untracked.string('StreamALCACombined')

    if "MINIAOD" in streamType:
        from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

    outputModuleName=streamType+'output'
    setattr(self.process,outputModuleName,output)
    outputModule=getattr(self.process,outputModuleName)
    setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
    path=getattr(self.process,outputModuleName+'_step')

    if self._options.outputCommands and streamType!='DQM':
        for evct in self._options.outputCommands.split(','):
            if not evct: continue
            self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

    if not self._options.inlineEventContent:
        tmpstreamType=streamType
        if "NANOEDM" in tmpstreamType :
            tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
        def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
        outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

    result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
Add selected standard sequences to the process

pileupSpec=self._options.pileup.split(',')[0]

from Configuration.StandardSequences.Mixing import Mixing,defineMixing
if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
    message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

if '.' in pileupSpec:
    mixingDict={'file':pileupSpec}
elif pileupSpec.startswith('file:'):
    mixingDict={'file':pileupSpec[5:]}
mixingDict=copy.copy(Mixing[pileupSpec])
if len(self._options.pileup.split(','))>1:
    mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

if 'file:' in pileupSpec:
    print("inlining mixing module configuration")

mixingDict.pop('file')

if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
elif self._options.pileup_input.startswith("filelist:"):
mixingDict['F']=self._options.pileup_input.split(',')
for command in specialization:
if len(mixingDict)!=0:
    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
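# The --pileup argument has the form "<scenario>[,<dict>]": the first token must be a key of
# Configuration.StandardSequences.Mixing (or a 'file:...'/'.py' spec); an optional trailing
# ",{...}" is eval()'d and merged into the mixing dictionary, and --pileup_input
# (das:/dbs:/filelist:/comma-separated files) fills its 'F' file list.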
stepSpec = self.stepMap[stepName]
print("Step:", stepName, "Spec:",stepSpec)
if stepName.startswith('re'):
    if stepName[2:] not in self._options.donotDropOnInput:
        self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
    stepName=stepName[2:]
getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
elif isinstance(stepSpec, list):
    getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
elif isinstance(stepSpec, tuple):
    getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
raise ValueError("Invalid step definition")

if self._options.restoreRNDSeeds!=False:
    if self._options.restoreRNDSeeds==True:
        self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
    self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

def dropSecondDropStar(iec):

if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
for evct in self._options.inputEventContent.split(','):
    if evct=='': continue
    theEventContent = getattr(self.process, evct+"EventContent")
    if hasattr(theEventContent,'outputCommands'):
        self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
    if hasattr(theEventContent,'inputCommands'):
        self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

dropSecondDropStar(self.process.source.inputCommands)

if not self._options.dropDescendant:
    self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
808 """Add conditions to the process"""
809 if not self.
_options.conditions:
return
811 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
812 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
813 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
816 from Configuration.AlCa.GlobalTag
import GlobalTag
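# The remaining --conditions string is handed to Configuration.AlCa.GlobalTag.GlobalTag to set
# process.GlobalTag; symbolic specs such as 'auto:run2_data' (an assumed example) are resolved
# through the autoCond mapping.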
823 """Include the customise code """
827 for c
in self.
_options.customisation_file:
828 custOpt.extend(c.split(
","))
830 for c
in self.
_options.customisation_file_unsch:
831 custOpt.extend(c.split(
","))
837 raise Exception(
"more than . in the specification:"+opt)
838 fileName=opt.split(
'.')[0]
839 if opt.count(
'.')==0: rest=
'customise'
841 rest=opt.split(
'.')[1]
842 if rest==
'py': rest=
'customise'
844 if fileName
in custMap:
845 custMap[fileName].extend(rest.split(
'+'))
847 custMap[fileName]=rest.split(
'+')
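# A --customise entry has the form "Package/SubPackage/file.func1+func2"; a bare file name (or a
# trailing '.py') defaults to a function called 'customise'. custMap collects the requested
# functions per file so each can be imported and appended to the configuration below.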
final_snippet='\n# customisation of the process.\n'

allFcn.extend(custMap[opt])
if allFcn.count(fcn)!=1:
    raise Exception("cannot specify twice "+fcn+" as a customisation method")

packageName = f.replace(".py","").replace("/",".")
__import__(packageName)
package = sys.modules[packageName]

customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
for line in file(customiseFile,'r'):
    if "import FWCore.ParameterSet.Config" in line:
    final_snippet += line
final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
for fcn in custMap[f]:
    print("customising the process with",fcn,"from",f)
    if not hasattr(package,fcn):
        raise Exception("config "+f+" has no function "+fcn)
    final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
    final_snippet += "\nprocess = %s(process)\n"%(fcn,)

final_snippet += '\n# End of customisation functions\n'

final_snippet='\n# Customisation from command line\n'
if self._options.customise_commands:
    for com in self._options.customise_commands.split('\\n'):
        final_snippet += '\n'+com
if self._options.particleTable not in defaultOptions.particleTableList:
    print('Invalid particle table provided. Options are:')
    print(defaultOptions.particleTable)
self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreams_cff"
self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'
self._options.beamspot=VtxSmearedDefaultKey

self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"

if self._options.scenario=='cosmics':
    self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
    self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"

if self._options.scenario=='HeavyIons':
    self._options.beamspot=VtxSmearedHIDefaultKey
    self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
    self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
    self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"

self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'

self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
if 'start' in self._options.conditions.lower():
    self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'

def inGeometryKeys(opt):
    from Configuration.StandardSequences.GeometryConf import GeometryConf
    if opt in GeometryConf:
        return GeometryConf[opt]

geoms=self._options.geometry.split(',')
if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
if '/' in geoms[1] or '_cff' in geoms[1]:
self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

if (geoms[0].startswith('DB:')):
    self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
if '/' in geoms[0] or '_cff' in geoms[0]:
simGeometry=geoms[0]
self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

if simGeometry not in defaultOptions.geometryExtendedOptions:
    self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"

self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

if self._options.pileup=='default':
    from Configuration.StandardSequences.Mixing import MixingDefaultKey
    self._options.pileup=MixingDefaultKey
output = cms.OutputModule("PoolOutputModule")
if stream.selectEvents.parameters_().__len__()!=0:
    output.SelectEvents = stream.selectEvents
output.SelectEvents = cms.untracked.PSet()
output.SelectEvents.SelectEvents=cms.vstring()
if isinstance(stream.paths,tuple):
    for path in stream.paths:
        output.SelectEvents.SelectEvents.append(path.label())
output.SelectEvents.SelectEvents.append(stream.paths.label())

if isinstance(stream.content,str):
    evtPset=getattr(self.process,stream.content)
    for p in evtPset.parameters_():
        setattr(output,p,getattr(evtPset,p))
    if not self._options.inlineEventContent:
        def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
        output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
output.outputCommands = stream.content

output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
    filterName = cms.untracked.string(stream.name))

if self._options.filtername:
    output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

if workflow in ("producers,full"):
    if isinstance(stream.paths,tuple):
        for path in stream.paths:
            self.schedule.append(path)
    self.schedule.append(stream.paths)

if (not self._options.relval) and workflow in ("full","output"):
    self.additionalOutputs[name] = output
    setattr(self.process,name,output)

if workflow == 'output':
    filterList = output.SelectEvents.SelectEvents
    for i, filter in enumerate(filterList):
        filterList[i] = filter+":"+self._options.triggerResultsProcess
if ( len(sequence.split('.'))==1 ):
elif ( len(sequence.split('.'))==2 ):
    sequence=sequence.split('.')[1]
print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
print(sequence,"not recognized")

for i,s in enumerate(seq.split('*')):
    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
    p=getattr(self.process,prefix)
    tmp = getattr(self.process, s)
    if isinstance(tmp, cms.Task):

setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))

for i,s in enumerate(seq.split('+')):
    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
def prepare_ALCA(self, sequence = None, workflow = 'full'):
    """ Enrich the process with alca streams """
    sequence = sequence.split('.')[-1]
    alcaList = sequence.split("+")
    from Configuration.AlCa.autoAlca import autoAlca

    for name in alcaConfig.__dict__:
        alcastream = getattr(alcaConfig,name)
        shortName = name.replace('ALCARECOStream','')
        if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
            output = self.addExtraStream(name,alcastream, workflow = workflow)
            self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
            if 'DQM' in alcaList:
                if not self._options.inlineEventContent and hasattr(self.process,name):
                    self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

            if isinstance(alcastream.paths,tuple):
                for path in alcastream.paths:
            for i in range(alcaList.count(shortName)):
                alcaList.remove(shortName)

        elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
            path = getattr(alcaConfig,name)
            alcaList.remove('DQM')

    if isinstance(alcastream,cms.Path):
    if len(alcaList) != 0:
        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            if isinstance(alcastream,cms.FilteredStream):
                available.append(name.replace('ALCARECOStream',''))
        print("The following alcas could not be found "+str(alcaList))
        print("available ",available)
        raise Exception("The following alcas could not be found "+str(alcaList))
loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
print("Loading lhe fragment from",loadFragment)
__import__(loadFragment)
self.process.load(loadFragment)

self._options.inlineObjets+=','+sequence

getattr(self.process,sequence).nEvents = int(self._options.number)

self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
self.excludedPaths.append("lhe_step")
self.schedule.append( self.process.lhe_step )
1318 """ load the fragment of generator configuration """
1325 if not '/' in loadFragment:
1326 loadFragment=
'Configuration.Generator.'+loadFragment
1328 loadFragment=loadFragment.replace(
'/',
'.')
1330 print(
"Loading generator fragment from",loadFragment)
1331 __import__(loadFragment)
1336 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
generatorModule=sys.modules[loadFragment]
genModules=generatorModule.__dict__

import FWCore.ParameterSet.Modules as cmstypes
for name in genModules:
    theObject = getattr(generatorModule,name)
    if isinstance(theObject, cmstypes._Module):
    elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
        self._options.inlineObjets+=','+name

if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
    if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
    elif 'generator' in genModules:

""" Enrich the schedule with the rest of the generation step """
genSeqName=sequence.split('.')[-1]

from Configuration.StandardSequences.VtxSmeared import VtxSmeared
cffToBeLoaded=VtxSmeared[self._options.beamspot]
raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

if self._options.scenario == 'HeavyIons':
    if self._options.pileup=='HiMixGEN':
        self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
    elif self._options.pileup=='HiMixEmbGEN':
        self.loadAndRemember("Configuration/StandardSequences/GeneratorEmbMix_cff")
    self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )

self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1395 """ Enrich the schedule with the summary of the filter step """
1402 """ Enrich the schedule with the simulation step"""
1412 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1418 """ Enrich the schedule with the digitisation step"""
1422 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1424 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1425 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1427 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1428 if self.
_options.inputEventContent==
'':
1429 self.
_options.inputEventContent=
'REGEN'
1438 """ Enrich the schedule with the crossing frame writer step"""
1444 """ Enrich the schedule with the digitisation step"""
1450 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1452 elif self.
_options.pileup_input.startswith(
"filelist:"):
1455 theFiles=self.
_options.pileup_input.split(
',')
1457 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1472 """ Enrich the schedule with the L1 simulation step"""
1479 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1480 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1481 if sequence
in supported:
1482 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1483 if self.
_options.scenario ==
'HeavyIons':
1487 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1492 """ Enrich the schedule with the HLT simulation step"""
1494 print(
"no specification of the hlt menu has been given, should never happen")
1495 raise Exception(
'no HLT sequence provided')
1499 from Configuration.HLT.autoHLT
import autoHLT
1502 sequence = autoHLT[key]
1504 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
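# An '@'-prefixed HLT spec is resolved through Configuration.HLT.autoHLT, which maps short keys
# (e.g. 'relval2016', an assumed example) to concrete menu names; any other value is used as the
# menu specification directly.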
if self._options.scenario == 'HeavyIons':
    optionsForHLT['type'] = 'HIon'
optionsForHLT['type'] = 'GRun'
optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in six.iteritems(optionsForHLT))
if sequence == 'run,fromSource':
    if hasattr(self.process.source,'firstRun'):
        self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
    elif hasattr(self.process.source,'setRunNumber'):
        self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
    raise Exception('Cannot replace menu to load %s'%(sequence))
self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))

self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

from HLTrigger.Configuration.CustomConfigs import ProcessName

if not hasattr(self.process,'HLTEndSequence'):
    self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")

seqReco=sequence.split(',')[1]
seqDigi=sequence.split(',')[0]
print("RAW2RECO requires two specifications",sequence,"insufficient")

for filt in allMetFilterPaths:
''' Enrich the schedule with L1 HW validation '''
print('\n\n\n DEPRECATED this has no action \n\n\n')

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with a user defined filter sequence '''
filterConfig=self.load(sequence.split('.')[0])
filterSeq=sequence.split('.')[-1]

class PrintAllModules(object):
    def enter(self,visitee):
        label=visitee.label()
    def leave(self,v): pass

expander=PrintAllModules()
getattr(self.process,filterSeq).visit( expander )
self._options.inlineObjets+=','+expander.inliner
self._options.inlineObjets+=','+filterSeq

''' Enrich the schedule with reconstruction '''

''' Enrich the schedule with reconstruction '''

''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
print("ERROR: this step is only implemented for FastSim")
self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
''' Enrich the schedule with PAT '''
self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

if len(self._options.customise_commands) > 1:
    self._options.customise_commands = self._options.customise_commands + " \n"
self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
''' Enrich the schedule with PATGEN '''
raise Exception("PATGEN step can only run on MC")

''' Enrich the schedule with NANO '''
custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)

if len(self._options.customise_commands) > 1:
    self._options.customise_commands = self._options.customise_commands + " \n"
self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

''' Enrich the schedule with NANOGEN '''
fromGen = any([x in self.stepMap for x in ['LHE','GEN','AOD']])
custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
''' Enrich the schedule with event interpretation '''
from Configuration.StandardSequences.EventInterpretation import EventInterpretation
if sequence in EventInterpretation:
    sequence = 'EIsequence'
raise Exception('Cannot set %s event interpretation'%( sequence) )

''' Enrich the schedule with skimming fragments'''
sequence = sequence.split('.')[-1]
skimlist=sequence.split('+')

from Configuration.Skimming.autoSkim import autoSkim

for skim in skimConfig.__dict__:
    skimstream = getattr(skimConfig,skim)
    if isinstance(skimstream,cms.Path):
    if (not isinstance(skimstream,cms.FilteredStream)):
    shortname = skim.replace('SKIMStream','')
    if (sequence=="all"):
    elif (shortname in skimlist):
        skimstreamDQM = cms.FilteredStream(
            responsible = skimstream.responsible,
            name = skimstream.name+'DQM',
            paths = skimstream.paths,
            selectEvents = skimstream.selectEvents,
            content = self._options.datatier+'EventContent',
            dataTier = cms.untracked.string(self._options.datatier)

        for i in range(skimlist.count(shortname)):
            skimlist.remove(shortname)

if (skimlist.__len__()!=0 and sequence!="all"):
    print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
    raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
''' Enrich the schedule with a user defined sequence '''

""" Enrich the schedule with the postreco step """

print(sequence,"in preparing validation")
from Validation.Configuration.autoValidation import autoValidation
sequence=sequence.split('.')[-1]
if sequence.find(',')!=-1:
    prevalSeqName=sequence.split(',')[0].split('+')
    valSeqName=sequence.split(',')[1].split('+')
prevalSeqName=sequence.split('+')
valSeqName=sequence.split('+')

postfix='_'+sequence
prevalSeqName=['prevalidation'+postfix]
valSeqName=['validation'+postfix]
if not hasattr(self.process,valSeqName[0]):
    valSeqName=[sequence]

for s in valSeqName+prevalSeqName:

for (i,s) in enumerate(prevalSeqName):
    setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
for (i,s) in enumerate(valSeqName):
    setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))

if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
    if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:

self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
    for (i,s) in enumerate(valSeqName):
1838 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1839 It will climb down within PSets, VPSets and VInputTags to find its target"""
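# Usage sketch, mirroring how the generated configurations call it further below:
#   process.mySequence.visit(
#       ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT",
#                                                         whitelist=("subSystemFolder",)))
# i.e. every InputTag in the visited sequence that points at process "HLT" is rewired to "reHLT".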
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):

def doIt(self,pset,base):
    if isinstance(pset, cms._Parameterizable):
        for name in pset.parameters_().keys():
            value = getattr(pset,name)
            type = value.pythonTypeName()
            if type in ('cms.PSet', 'cms.untracked.PSet'):
                self.doIt(value,base+"."+name)
            elif type in ('cms.VPSet', 'cms.untracked.VPSet'):
                for (i,ps) in enumerate(value): self.doIt(ps,"%s.%s[%d]"%(base,name,i) )
            elif type in ('cms.string', 'cms.untracked.string'):
            elif type in ('cms.VInputTag', 'cms.untracked.VInputTag'):
                for (i,n) in enumerate(value):
                    if not isinstance(n, cms.InputTag):
            elif type in ('cms.vstring', 'cms.untracked.vstring'):
                for (i,n) in enumerate(value):
            elif type in ('cms.InputTag', 'cms.untracked.InputTag'):
                setattr(getattr(pset, name),"processName",self._paramReplace)

def enter(self,visitee):
    label = visitee.label()
    except AttributeError:
        label = '<Module not in a Process>'
        label = 'other exception'
    self.doIt(visitee, label)

def leave(self,visitee):

print("Replacing all InputTag %s => %s"%(oldT,newT))
loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))

if proc==HLTprocess: return
print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
while '@' in repr(seqList) and level<maxLevel:
    for specifiedCommand in seqList:
        if specifiedCommand.startswith('@'):
            location=specifiedCommand[1:]
            if not location in mapping:
                raise Exception("Impossible to map "+location+" from "+repr(mapping))
            mappedTo=mapping[location]
            mappedTo=mappedTo[index]
            seqList.remove(specifiedCommand)
            seqList.extend(mappedTo.split('+'))
raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))

sequenceList=sequence.split('.')[-1].split('+')
postSequenceList=sequence.split('.')[-1].split('+')
from DQMOffline.Configuration.autoDQM import autoDQM

if len(set(sequenceList))!=len(sequenceList):
    sequenceList=list(set(sequenceList))
    print("Duplicate entries for DQM:, using",sequenceList)

pathName='dqmoffline_step'
for (i,sequence) in enumerate(sequenceList):
    pathName='dqmoffline_%d_step'%(i)
    setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )

    if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):

pathName='dqmofflineOnPAT_step'
for (i,sequence) in enumerate(postSequenceList):
    if (sequenceList[i]==postSequenceList[i]):
    pathName='dqmofflineOnPAT_%d_step'%(i)
    setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
1983 """ Enrich the process with harvesting step """
1984 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff'
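# --harvesting selects the DQMSaver variant: with the default 'AtRunEnd' the string assembled
# above resolves to Configuration/StandardSequences/DQMSaverAtRunEnd_cff.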
sequence = sequence.split('.')[-1]

harvestingList = sequence.split("+")
from DQMOffline.Configuration.autoDQM import autoDQM
from Validation.Configuration.autoValidation import autoValidation

combined_mapping = copy.deepcopy( autoDQM )
combined_mapping.update( autoValidation )
self.expandMapping(harvestingList,combined_mapping,index=-1)

if len(set(harvestingList))!=len(harvestingList):
    harvestingList=list(set(harvestingList))
    print("Duplicate entries for HARVESTING, using",harvestingList)

for name in harvestingList:
    if not name in harvestingConfig.__dict__:
        print(name,"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())

    harvestingstream = getattr(harvestingConfig,name)
    if isinstance(harvestingstream,cms.Path):
    if isinstance(harvestingstream,cms.Sequence):
        setattr(self.process,name+"_step",cms.Path(harvestingstream))
2021 """ Enrich the process with AlCaHarvesting step """
2023 sequence=sequence.split(
".")[-1]
2026 harvestingList = sequence.split(
"+")
2030 from Configuration.AlCa.autoPCL
import autoPCL
2033 for name
in harvestingConfig.__dict__:
2034 harvestingstream = getattr(harvestingConfig,name)
2035 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2037 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2038 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2039 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2040 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2042 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2043 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2044 harvestingList.remove(name)
2046 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2049 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2050 print(
"The following harvesting could not be found : ", harvestingList)
2051 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2066 """ Add useful info for the production. """
2067 self.
process.configurationMetadata=cms.untracked.PSet\
2068 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2069 name=cms.untracked.string(
"Applications"),
2070 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2078 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n'
2080 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n"
from Configuration.StandardSequences.Eras import eras
for requestedEra in self._options.era.split(",") :
    modifierStrings.append(requestedEra)
    modifierImports.append(eras.pythonCfgLines[requestedEra])
    modifiers.append(getattr(eras,requestedEra))

for pm in self._options.procModifiers.split(','):
    modifierStrings.append(pm)
    modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
    modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))

if len(modifierStrings)>0:
if len(modifiers)>0:
def prepare(self, doChecking = False):
    """ Prepare the configuration string and add missing pieces."""
outputModuleCfgCode=""

self.pythonCfgCode += "# import of standard configurations\n"

if not hasattr(self.process,"configurationMetadata"):

self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
tmpOut = cms.EndPath(output)
setattr(self.process,name+'OutPath',tmpOut)

for object in self._options.inlineObjets.split(','):
    if not hasattr(self.process,object):
        print('cannot inline -'+object+'- : not known')

if self._options.pileup=='HiMixEmbGEN':
    self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.bool(True)\n"

for path in self.process.paths:
for endpath in self.process.endpaths:

result = "process.schedule = cms.Schedule("

self.process.schedule = cms.Schedule()
if not isinstance(item, cms.Schedule):
    self.process.schedule.append(item)
self.process.schedule.extend(item)

if hasattr(self.process,"HLTSchedule"):
    pathNames = ['process.'+p.label_() for p in beforeHLT]
    result += ','.join(pathNames)+')\n'
    result += 'process.schedule.extend(process.HLTSchedule)\n'
    pathNames = ['process.'+p.label_() for p in afterHLT]
    result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
pathNames = ['process.'+p.label_() for p in self.schedule]
result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'

self.process.schedule.associate(getattr(self.process, labelToAssociate))
self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
if self._options.nThreads != "1":
    self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
    self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32("+self._options.nStreams+")\n"
    self.pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32("+self._options.nConcurrentLumis+")\n"
    self.pythonCfgCode += "if hasattr(process, 'DQMStore'): process.DQMStore.assertLegacySafe=cms.untracked.bool(False)\n"
    self.process.options.numberOfThreads=cms.untracked.uint32(int(self._options.nThreads))
    self.process.options.numberOfStreams=cms.untracked.uint32(int(self._options.nStreams))
    self.process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(int(self._options.nConcurrentLumis))

self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")

self.pythonCfgCode += '# filter all path with the production filter sequence\n'
for path in self.process.paths:

print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")

if hasattr(self.process,"logErrorHarvester"):
    self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
    self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
    self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
    from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands

self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete

imports = cms.specialImportRegistry.getSpecialImports()
if len(imports) > 0:
if not self._options.io.endswith('.io'): self._options.io+='.io'
if hasattr(self.process.source,"fileNames"):
    if len(self.process.source.fileNames.value()):
        ioJson['primary']=self.process.source.fileNames.value()
if hasattr(self.process.source,"secondaryFileNames"):
    if len(self.process.source.secondaryFileNames.value()):
        ioJson['secondary']=self.process.source.secondaryFileNames.value()
if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
    ioJson['pileup']=self._options.pileup_input[4:]

ioJson[o]=om.fileName.value()
ioJson['GT']=self.process.GlobalTag.globaltag.value()

io.write(json.dumps(ioJson))