__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import os
import re
import sys
import copy
import string
import six

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes

defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
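
# The attributes above are the built-in defaults for every cmsDriver.py option
# that ConfigBuilder understands; the cmsDriver command-line parsing overrides
# individual attributes before a ConfigBuilder is created from them.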

def dumpPython(process,name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"

def filesFromList(fileName, s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
def filesFromDASQuery(query, option="", s=None):
    import time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print "the query is",query
    eC=5
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print 'Sleeping, then retrying DAS'
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
def anyOf(listOfKeys,dict,opt=None):
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
191 """The main building routines """ 193 def __init__(self, options, process = None, with_output = False, with_input = False ):
194 """options taken from old cmsDriver and optparse """ 196 options.outfile_name = options.dirout+options.fileout
200 if self._options.isData
and options.isMC:
201 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
206 if 'ENDJOB' in self._options.step:
207 if (hasattr(self.
_options,
"outputDefinition")
and \
208 self._options.outputDefinition !=
'' and \
209 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
210 (hasattr(self.
_options,
"datatier")
and \
211 self._options.datatier
and \
212 'DQMIO' in self._options.datatier):
213 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 214 self._options.step=self._options.step.replace(
',ENDJOB',
'')
219 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
222 for step
in self._options.step.split(
","):
223 if step==
'':
continue 224 stepParts = step.split(
":")
225 stepName = stepParts[0]
226 if stepName
not in stepList
and not stepName.startswith(
're'):
227 raise ValueError(
"Step "+stepName+
" unknown")
228 if len(stepParts)==1:
229 self.stepMap[stepName]=
"" 230 elif len(stepParts)==2:
231 self.stepMap[stepName]=stepParts[1].
split(
'+')
232 elif len(stepParts)==3:
233 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
235 raise ValueError(
"Step definition "+step+
" invalid")
236 self.stepKeys.append(stepName)
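
        # A --step entry can take three forms; the resulting stepMap value is:
        #   "STEP"        -> ""                 (run the step's default sequence)
        #   "STEP:a+b"    -> ['a','b']          (run the listed sequences)
        #   "STEP:x:a+b"  -> (['a','b'],'x')    (extra specifier plus sequences,
        #                                        interpreted by the prepare_STEP method)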

        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        self.schedule = list()

        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.labelsToAssociate=[]
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
        self.excludedPaths=[]

        """Function to add the igprof profile service so that you can dump in the middle of the run."""
        profileOpts = self._options.profile.split(':')
        profilerStart = 1
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        if len(profileOpts):
            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)
        if len(profileOpts):
            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)
        if len(profileOpts):
            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       self._options.step,
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)

    def load(self,includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def executeAndRemember(self, command):
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))

        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            (start, interval, eventFormat, jobFormat)=self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

        """Here we decide how many evts will be processed"""
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    args=self._options.filein.split(':')
                    article=args[1]
                    print 'LHE input from article ',article
                    location='/store/lhe/'
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    if len(self.process.source.fileNames)==0:
                        print 'Issue loading LHE files, please check and try again.'
                        print 'Issue with empty filename, but can pass line check'
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
            elif self._options.filetype == "DQMDAQ":
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options,"evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
498 """ Add output module to the process """ 500 if self._options.outputDefinition:
501 if self._options.datatier:
502 print "--datatier & --eventcontent options ignored" 505 outList = eval(self._options.outputDefinition)
506 for (id,outDefDict)
in enumerate(outList):
507 outDefDictStr=outDefDict.__str__()
508 if not isinstance(outDefDict,dict):
509 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
511 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
514 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
515 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
516 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
517 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
518 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
520 if not theModuleLabel:
521 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 525 for name
in tryNames:
526 if not hasattr(self.process,name):
529 if not theModuleLabel:
530 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
532 defaultFileName=self._options.outfile_name
534 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
536 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
537 if not theFileName.endswith(
'.root'):
541 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))

                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")

                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName)))
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if not hasattr(output,'SelectEvents'):
                    output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                for alca in self.AlCaPaths:
                    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)

                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")

                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)"):
                        return label
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra output command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()

            ##ends the --output options model
            return result

        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # no output needed if the only step is ALCA
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return result

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            ## override streamType to eventContent in case of NANOEDM
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent.clone(),
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                miniAOD_customizeOutput(output)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        """
        Add selected standard sequences to the process
        """
        # load the pile up file
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            # does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # put mixing parameters in a dictionary
            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # load the pile-up cfg file corresponding to the requested scenario
            if 'file:' in pileupSpec:
                # the file is local
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
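
        # The --pileup value handled above is either a key of the Mixing dictionary,
        # a cff ('file:...' or a dotted python path), optionally followed by ',{...}'
        # with overrides merged into mixingDict; --pileup_input then supplies the
        # pile-up files (plain list, 'filelist:' or 'das:'/'dbs:').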

        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    self.loadAndRemember(self.SimGeometryCFF)
                    if self.geometryDBLabel:
                        self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
        except ImportError:
            print "Geometry option",self._options.geometry,"unknown."
            raise

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            #it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):
                # drop all occurrences of 'drop *' except the last one
                lastInstance=max([i for i,v in enumerate(iec) if v=='drop *'])
                for i in reversed(range(lastInstance)):
                    if iec[i]=='drop *':
                        iec.pop(i)

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
806 """Add conditions to the process""" 807 if not self._options.conditions:
return 809 if 'FrontierConditions_GlobalTag' in self._options.conditions:
810 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 811 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
813 self.loadAndRemember(self.ConditionsDefaultCFF)
815 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
816 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
817 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
821 """Include the customise code """ 825 for c
in self._options.customisation_file:
826 custOpt.extend(c.split(
","))
828 for c
in self._options.customisation_file_unsch:
829 custOpt.extend(c.split(
","))
835 raise Exception(
"more than . in the specification:"+opt)
836 fileName=opt.split(
'.')[0]
837 if opt.count(
'.')==0: rest=
'customise' 839 rest=opt.split(
'.')[1]
840 if rest==
'py': rest=
'customise' 842 if fileName
in custMap:
843 custMap[fileName].extend(rest.split(
'+'))
845 custMap[fileName]=rest.split(
'+')
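
        # Example: "--customise SomePackage/SomeModule.funcA+funcB" (names illustrative)
        # ends up as custMap['SomePackage/SomeModule'] = ['funcA','funcB'];
        # a bare "file.py" entry maps to the default function name 'customise'.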

        final_snippet='\n# customisation of the process.\n'

        # check that a customisation function is not requested twice
        allFcn=[]
        for opt in custMap:
            allFcn.extend(custMap[opt])
        for fcn in allFcn:
            if allFcn.count(fcn)!=1:
                raise Exception("cannot specify twice "+fcn+" as a customisation method")

        for f in custMap:
            packageName = f.replace(".py","").replace("/",".")
            __import__(packageName)
            package = sys.modules[packageName]

            customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

            final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
            if self._options.inline_custom:
                for line in file(customiseFile,'r'):
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
            else:
                final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
            for fcn in custMap[f]:
                print "customising the process with",fcn,"from",f
                if not hasattr(package,fcn):
                    #bound to fail at run time
                    raise Exception("config "+f+" has no function "+fcn)
                #execute the command
                self.process=getattr(package,fcn)(self.process)
                #and print it in the configuration
                final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
                final_snippet += "\nprocess = %s(process)\n"%(fcn,)

        final_snippet += '\n# End of customisation functions\n'

        final_snippet='\n# Customisation from command line\n'
        if self._options.customise_commands:
            for com in self._options.customise_commands.split('\\n'):
                com=string.lstrip(com)
                self.executeAndRemember(com)
                final_snippet += '\n'+com

        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.RECOSIMDefaultSeq='recosim'
        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'
        self.PATGENDefaultSeq='miniGEN'
        self.NANODefaultSeq='nanoSequence'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq='nanoSequenceMC'
        else:
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        # the magnetic field
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None
        simGeometry=''
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            from Configuration.StandardSequences.GeometryConf import GeometryConf

            geoms=self._options.geometry.split(',')
            # a single key can expand to 'simGeometry,recoGeometry' through GeometryConf
            if len(geoms)==1 and geoms[0] in GeometryConf:
                geoms=GeometryConf[geoms[0]].split(',')
            if len(geoms)==2:
                # the reco geometry can be given separately
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires different defaults
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        if self._options.isData:
            self._options.pileup=None

        self.REDIGIDefaultSeq=self.DIGIDefaultSeq

        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                    return label
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        # in case of relvals we do not want to have additional outputs
        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            # adjust the select events to the proper trigger results from the previous process
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

        if ( len(sequence.split('.'))==1 ):
            l=self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            l=self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"
            raise
        return l

        if '*' in seq:
            # create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    p=getattr(self.process,prefix)
                    p+=getattr(self.process, s)
            self.schedule.append(getattr(self.process,prefix))
        elif not '+' in seq:
            if self.nextScheduleIsConditional:
                self.conditionalPaths.append(prefix)
            setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
            self.schedule.append(getattr(self.process,prefix))
        else:
            # create as many paths as there are sequences
            for i,s in enumerate(seq.split('+')):
                sn=prefix+'%d'%(i)
                setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                self.schedule.append(getattr(self.process,sn))
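
        # scheduleSequence: 'a*b*c' builds a single Path containing all three sequences,
        # 'a+b+c' builds one numbered Path per sequence (prefix0, prefix1, ...), and a
        # single name gives exactly one Path, flagged conditional when
        # nextScheduleIsConditional is set.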
1236 """ Enrich the process with alca streams """ 1238 sequence = sequence.split(
'.')[-1]
1241 alcaList = sequence.split(
"+")
1243 from Configuration.AlCa.autoAlca
import autoAlca
1247 for name
in alcaConfig.__dict__:
1248 alcastream = getattr(alcaConfig,name)
1249 shortName = name.replace(
'ALCARECOStream',
'')
1250 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1251 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1252 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1253 self.AlCaPaths.append(shortName)
1254 if 'DQM' in alcaList:
1255 if not self._options.inlineEventContent
and hasattr(self.process,name):
1256 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1258 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1261 if self._options.hltProcess
or 'HLT' in self.stepMap:
1262 if isinstance(alcastream.paths,tuple):
1263 for path
in alcastream.paths:
1268 for i
in range(alcaList.count(shortName)):
1269 alcaList.remove(shortName)
1272 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1273 path = getattr(alcaConfig,name)
1274 self.schedule.append(path)
1275 alcaList.remove(
'DQM')
1277 if isinstance(alcastream,cms.Path):
1279 self.blacklist_paths.append(alcastream)
1282 if len(alcaList) != 0:
1284 for name
in alcaConfig.__dict__:
1285 alcastream = getattr(alcaConfig,name)
1286 if isinstance(alcastream,cms.FilteredStream):
1287 available.append(name.replace(
'ALCARECOStream',
''))
1288 print "The following alcas could not be found "+
str(alcaList)
1289 print "available ",available
1291 raise Exception(
"The following alcas could not be found "+
str(alcaList))

        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1311 """ load the fragment of generator configuration """ 1316 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1318 if not '/' in loadFragment:
1319 loadFragment=
'Configuration.Generator.'+loadFragment
1321 loadFragment=loadFragment.replace(
'/',
'.')
1323 print "Loading generator fragment from",loadFragment
1324 __import__(loadFragment)
1328 if not (self._options.filein
or self._options.dasquery):
1329 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1332 generatorModule=sys.modules[loadFragment]
1333 genModules=generatorModule.__dict__
1336 if self.LHEDefaultSeq
in genModules:
1337 del genModules[self.LHEDefaultSeq]
1339 if self._options.hideGen:
1340 self.loadAndRemember(loadFragment)
1342 self.process.load(loadFragment)
1344 import FWCore.ParameterSet.Modules
as cmstypes
1345 for name
in genModules:
1346 theObject = getattr(generatorModule,name)
1347 if isinstance(theObject, cmstypes._Module):
1348 self._options.inlineObjets=name+
','+self._options.inlineObjets
1349 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1350 self._options.inlineObjets+=
','+name
1352 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1353 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1354 self.productionFilterSequence =
'ProductionFilterSequence' 1355 elif 'generator' in genModules:
1356 self.productionFilterSequence =
'generator' 1358 """ Enrich the schedule with the rest of the generation step """ 1359 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1360 genSeqName=sequence.split(
'.')[-1]
1364 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1365 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1366 self.loadAndRemember(cffToBeLoaded)
1368 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1370 if self._options.scenario ==
'HeavyIons':
1371 if self._options.pileup==
'HiMixGEN':
1372 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1374 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1376 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1377 self.schedule.append(self.process.generation_step)
1380 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1382 if 'reGEN' in self.stepMap:
1386 """ Enrich the schedule with the summary of the filter step """ 1388 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1389 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1393 """ Enrich the schedule with the simulation step""" 1394 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1395 if not self._options.fast:
1396 if self._options.gflash==
True:
1397 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1399 if self._options.magField==
'0T':
1400 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1402 if self._options.magField==
'0T':
1403 self.executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1405 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1409 """ Enrich the schedule with the digitisation step""" 1412 if self._options.gflash==
True:
1413 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1415 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1416 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1418 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1419 if self._options.inputEventContent==
'':
1420 self._options.inputEventContent=
'REGEN' 1422 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1429 """ Enrich the schedule with the crossing frame writer step""" 1435 """ Enrich the schedule with the digitisation step""" 1439 if self._options.pileup_input:
1441 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1442 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1443 elif self._options.pileup_input.startswith(
"filelist:"):
1444 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1446 theFiles=self._options.pileup_input.split(
',')
1448 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1463 """ Enrich the schedule with the L1 simulation step""" 1464 assert(sequence ==
None)
1470 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1471 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1472 if sequence
in supported:
1473 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1474 if self._options.scenario ==
'HeavyIons':
1475 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1476 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1478 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1483 """ Enrich the schedule with the HLT simulation step""" 1485 print "no specification of the hlt menu has been given, should never happen" 1486 raise Exception(
'no HLT sequence provided')
1490 from Configuration.HLT.autoHLT
import autoHLT
1493 sequence = autoHLT[key]
1495 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1501 if self._options.scenario ==
'HeavyIons':
1502 optionsForHLT[
'type'] =
'HIon' 1504 optionsForHLT[
'type'] =
'GRun' 1505 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1506 if sequence ==
'run,fromSource':
1507 if hasattr(self.process.source,
'firstRun'):
1508 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1509 elif hasattr(self.process.source,
'setRunNumber'):
1510 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1512 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1514 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1518 if self._options.isMC:
1519 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1521 if self._options.name !=
'HLT':
1522 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1523 self.additionalCommands.append(
'process = ProcessName(process)')
1524 self.additionalCommands.append(
'')
1525 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1528 self.schedule.append(self.process.HLTSchedule)
1529 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1532 if self._options.fast:
1533 if not hasattr(self.process,
'HLTEndSequence'):
1534 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")

        if ',' in sequence:
            seqReco=sequence.split(',')[1]
            seqDigi=sequence.split(',')[0]
        else:
            print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))

        ''' Enrich the schedule with L1 HW validation '''
        print '\n\n\n DEPRECATED this has no action \n\n\n'

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with a user defined filter sequence '''
        ## load the relevant part
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]

        label=visitee.label()

        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        ## put the filtering path in the schedule
        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True
        self.productionFilterSequence = filterSeq

        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
            self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with PATGEN '''
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")

        ''' Enrich the schedule with NANO '''
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        else:
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
        else:
            raise Exception('Cannot set %s event interpretation'%( sequence) )
        self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')

        ''' Enrich the schedule with skimming fragments'''
        skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
        sequence = sequence.split('.')[-1]

        skimlist=sequence.split('+')
        from Configuration.Skimming.autoSkim import autoSkim

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                # blacklist the path so that it does not appear in the cfg
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                        )
                    self.addExtraStream(skim+'DQM',skimstreamDQM)
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
def prepare_USER(self, sequence = None):
    ''' Enrich the schedule with a user defined sequence '''

def prepare_POSTRECO(self, sequence = None):
    """ Enrich the schedule with the postreco step """

def prepare_VALIDATION(self, sequence = 'validation'):
    print sequence, "in preparing validation"
    from Validation.Configuration.autoValidation import autoValidation
    # in case of VALIDATION:something:somethingelse -> something,somethingelse
    sequence = sequence.split('.')[-1]
    if sequence.find(',') != -1:
        # prevalidation and validation sequences both given explicitly
        prevalSeqName = sequence.split(',')[0].split('+')
        valSeqName = sequence.split(',')[1].split('+')
    elif '@' in sequence:
        prevalSeqName = sequence.split('+')
        valSeqName = sequence.split('+')
    else:
        postfix = '_'+sequence
        prevalSeqName = ['prevalidation'+postfix]
        valSeqName = ['validation'+postfix]
        if not hasattr(self.process, valSeqName[0]):
            valSeqName = [sequence]

    if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
        for s in valSeqName+prevalSeqName:
            self.renameHLTprocessInSequence(s)

    for (i,s) in enumerate(prevalSeqName):
        setattr(self.process, 'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
        self.schedule.append(getattr(self.process, 'prevalidation_step%s'%NFI(i)))

    for (i,s) in enumerate(valSeqName):
        setattr(self.process, 'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
        self.schedule.append(getattr(self.process, 'validation_step%s'%NFI(i)))

    # nothing more to do when validation runs on an existing PAT input without RECO
    if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
        return

    if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
        if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
            self._options.restoreRNDSeeds=True

    if not 'DIGI' in self.stepMap and not self._options.fast:
        self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

    if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
        # put the filtering path in the schedule
        for (i,s) in enumerate(valSeqName):
            getattr(self.process, 'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process, 'validation_step%s'%NFI(i))._seq
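# NFI above is assumed (its definition is not shown in this extract) to map a sequence index to a
# step-name suffix, with an empty suffix for index 0, so the created paths come out as
# validation_step, validation_step1, validation_step2, ... A minimal sketch of that helper:
NFI = lambda i: "" if i == 0 else "%s" % i
print('validation_step%s' % NFI(0))   # validation_step
print('validation_step%s' % NFI(1))   # validation_step1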
1825 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1826 It will climb down within PSets, VPSets and VInputTags to find its target""" 1827 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1834 if isinstance(pset, cms._Parameterizable):
1835 for name
in pset.parameters_().
keys():
1841 value = getattr(pset,name)
1842 type = value.pythonTypeName()
1843 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1844 self.
doIt(value,base+
"."+name)
1845 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1846 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1847 elif type
in (
'cms.string',
'cms.untracked.string'):
1849 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1851 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1852 for (i,n)
in enumerate(value):
1853 if not isinstance(n, cms.InputTag):
1857 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1860 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1861 for (i,n)
in enumerate(value):
1864 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1866 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1867 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1872 label = visitee.label()
1873 except AttributeError:
1874 label =
'<Module not in a Process>' 1876 label =
'other execption' 1877 self.
doIt(visitee, label)
1884 print "Replacing all InputTag %s => %s"%(oldT,newT)
1887 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1888 if not loadMe
in self.additionalCommands:
1889 self.additionalCommands.append(loadMe)
1890 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT'):
    if self._options.hltProcess:
        proc = self._options.hltProcess
    else:
        proc = self.process.name_()
    if proc == HLTprocess:
        return
    # walk all modules of the sequence and replace the HLT process name
    print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc)
    getattr(self.process, sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess, proc, whitelist = ("subSystemFolder",)))
    if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
        self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
    self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
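# The additionalCommands string appended above is exactly what ends up in the dumped configuration.
# Run interactively, the equivalent call looks like this hedged sketch (it assumes a CMSSW session
# with a process that has a 'DQMOffline' sequence and an HLT menu re-run under the name 'reHLT'):
from Configuration.Applications.ConfigBuilder import ConfigBuilder
process.DQMOffline.visit(
    ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT", whitelist=("subSystemFolder",)))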
def expandMapping(self, seqList, mapping, index=None):
    maxLevel = 30   # bound on nested '@' expansions
    level = 0
    while '@' in repr(seqList) and level < maxLevel:
        level += 1
        for specifiedCommand in seqList:
            if specifiedCommand.startswith('@'):
                location = specifiedCommand[1:]
                if not location in mapping:
                    raise Exception("Impossible to map "+location+" from "+repr(mapping))
                mappedTo = mapping[location]
                if index is not None:
                    mappedTo = mappedTo[index]
                seqList.remove(specifiedCommand)
                seqList.extend(mappedTo.split('+'))
                break
    if level == maxLevel:
        raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
def prepare_DQM(self, sequence = 'DQMOffline'):
    sequenceList = sequence.split('.')[-1].split('+')
    postSequenceList = sequence.split('.')[-1].split('+')
    from DQMOffline.Configuration.autoDQM import autoDQM
    self.expandMapping(sequenceList, autoDQM, index=0)
    self.expandMapping(postSequenceList, autoDQM, index=1)

    if len(set(sequenceList)) != len(sequenceList):
        sequenceList = list(set(sequenceList))
        print "Duplicate entries for DQM, using", sequenceList

    pathName = 'dqmoffline_step'
    for (i,sequence) in enumerate(sequenceList):
        if (i != 0):
            pathName = 'dqmoffline_%d_step'%(i)

        if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
            self.renameHLTprocessInSequence(sequence)

        setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence ) ) )
        self.schedule.append(getattr(self.process, pathName))

        if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # will get into the schedule smoothly
            getattr(self.process, pathName).insert(0, self.process.genstepfilter)

    pathName = 'dqmofflineOnPAT_step'
    for (i,sequence) in enumerate(postSequenceList):
        if (i != 0):
            pathName = 'dqmofflineOnPAT_%d_step'%(i)

        setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence ) ) )
        self.schedule.append(getattr(self.process, pathName))
1964 """ Enrich the process with harvesting step """ 1965 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1969 sequence = sequence.split(
'.')[-1]
1972 harvestingList = sequence.split(
"+")
1973 from DQMOffline.Configuration.autoDQM
import autoDQM
1974 from Validation.Configuration.autoValidation
import autoValidation
1976 combined_mapping = copy.deepcopy( autoDQM )
1977 combined_mapping.update( autoValidation )
1978 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1980 if len(set(harvestingList))!=len(harvestingList):
1981 harvestingList=
list(set(harvestingList))
1982 print "Duplicate entries for HARVESTING, using",harvestingList
1984 for name
in harvestingList:
1985 if not name
in harvestingConfig.__dict__:
1986 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1988 harvestingstream = getattr(harvestingConfig,name)
1989 if isinstance(harvestingstream,cms.Path):
1990 self.schedule.append(harvestingstream)
1991 self.blacklist_paths.append(harvestingstream)
1992 if isinstance(harvestingstream,cms.Sequence):
1993 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1994 self.schedule.append(getattr(self.process,name+
"_step"))
2000 """ Enrich the process with AlCaHarvesting step """ 2002 sequence=sequence.split(
".")[-1]
2005 harvestingList = sequence.split(
"+")
2009 from Configuration.AlCa.autoPCL
import autoPCL
2012 for name
in harvestingConfig.__dict__:
2013 harvestingstream = getattr(harvestingConfig,name)
2014 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2015 self.schedule.append(harvestingstream)
2016 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2017 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2018 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2019 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2021 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2022 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2023 harvestingList.remove(name)
2025 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2026 self.schedule.append(lastStep)
2028 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2029 print "The following harvesting could not be found : ", harvestingList
2030 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
def finalizeFastSimHLT(self):
    self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
    self.schedule.append(self.process.reconstruction)
2045 """ Add useful info for the production. """ 2046 self.process.configurationMetadata=cms.untracked.PSet\
2047 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2048 name=cms.untracked.string(
"Applications"),
2049 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2052 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
def create_process(self):
    self.pythonCfgCode = "# Auto generated configuration file\n"
    self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
    self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
    self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

    # set up the process modifiers (eras and procModifiers)
    modifiers = []
    modifierStrings = []
    modifierImports = ['from Configuration.StandardSequences.Eras import eras']

    if hasattr(self._options, "era") and self._options.era:
        # multiple eras can be specified in a comma separated list
        from Configuration.StandardSequences.Eras import eras
        for requestedEra in self._options.era.split(","):
            modifierStrings.append("eras."+requestedEra)
            modifiers.append(getattr(eras, requestedEra))

    if hasattr(self._options, "procModifiers") and self._options.procModifiers:
        import importlib
        for pm in self._options.procModifiers.split(','):
            modifierStrings.append(pm)
            modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
            modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'), pm))

    self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
    self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'"
    if len(modifierStrings) > 0:
        self.pythonCfgCode += ','+','.join(modifierStrings)
    self.pythonCfgCode += ')\n\n'

    if self.process == None:
        if len(modifiers) > 0:
            self.process = cms.Process(self._options.name, *modifiers)
        else:
            self.process = cms.Process(self._options.name)
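# What the emitted header boils down to for the user is a cms.Process built with the requested
# era and process modifiers. A hedged, minimal equivalent (assumes a CMSSW environment in which
# the Run2_2017 era is defined; the process name 'RECO' is just an example):
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('RECO', eras.Run2_2017)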
2102 """ Prepare the configuration string and add missing pieces.""" 2114 outputModuleCfgCode=
"" 2115 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2120 self.
pythonCfgCode +=
"# import of standard configurations\n" 2121 for module
in self.imports:
2125 if not hasattr(self.process,
"configurationMetadata"):
2129 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2132 for comment,object
in self.addedObjects:
2144 nl=sorted(self.additionalOutputs.keys())
2146 output = self.additionalOutputs[name]
2147 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2148 tmpOut = cms.EndPath(output)
2149 setattr(self.process,name+
'OutPath',tmpOut)
2150 self.schedule.append(tmpOut)
    # explicit dump of the content of additionalCommands
    for command in self.additionalCommands:
        self.pythonCfgCode += command + '\n'

    # comma separated list of objects that should be inlined in the configuration
    for object in self._options.inlineObjets.split(','):
        if not object:
            continue
        if not hasattr(self.process, object):
            print 'cannot inline -'+object+'- : not known'
        else:
            self.pythonCfgCode += '\n'
            self.pythonCfgCode += dumpPython(self.process, object)

    # dump all paths and endpaths that are not blacklisted
    for path in self.process.paths:
        if getattr(self.process, path) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process, path)

    for endpath in self.process.endpaths:
        if getattr(self.process, endpath) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process, endpath)
    # dump the schedule
    result = "process.schedule = cms.Schedule("

    # handling of the schedule
    self.process.schedule = cms.Schedule()
    for item in self.schedule:
        if not isinstance(item, cms.Schedule):
            self.process.schedule.append(item)
        else:
            self.process.schedule.extend(item)

    if hasattr(self.process, "HLTSchedule"):
        beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
        afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
        pathNames = ['process.'+p.label_() for p in beforeHLT]
        result += ','.join(pathNames)+')\n'
        result += 'process.schedule.extend(process.HLTSchedule)\n'
        pathNames = ['process.'+p.label_() for p in afterHLT]
        result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
    else:
        pathNames = ['process.'+p.label_() for p in self.schedule]
        result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
    self.pythonCfgCode += result

    # associate tasks (e.g. the PAT task) to the schedule
    for labelToAssociate in self.labelsToAssociate:
        self.process.schedule.associate(getattr(self.process, labelToAssociate))
        self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

    self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
    self.pythonCfgCode += "associatePatAlgosToolsTask(process)\n"

    # multi-threading options
    if self._options.nThreads != "1":
        self.pythonCfgCode += "\n"
        self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
        self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(0)\n"

    # repacked version
    if self._options.isRepacked:
        self.pythonCfgCode += "\n"
        self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
        self.pythonCfgCode += "MassReplaceInputTag(process)\n"
        MassReplaceInputTag(self.process)
    # special treatment of the production filter sequence
    if self.productionFilterSequence:
        self.pythonCfgCode += '# filter all path with the production filter sequence\n'
        self.pythonCfgCode += 'for path in process.paths:\n'
        if len(self.conditionalPaths):
            self.pythonCfgCode += '\tif not path in %s: continue\n'%str(self.conditionalPaths)
        if len(self.excludedPaths):
            self.pythonCfgCode += '\tif path in %s: continue\n'%str(self.excludedPaths)
        self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
        pfs = getattr(self.process, self.productionFilterSequence)
        for path in self.process.paths:
            if not path in self.conditionalPaths: continue
            if path in self.excludedPaths: continue
            getattr(self.process, path)._seq = pfs * getattr(self.process, path)._seq
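# In the dumped configuration the loop above shows up as literal Python that prepends the
# production filter to every selected path; an illustrative rendering (the sequence and path
# names below are hypothetical):
# filter all path with the production filter sequence
for path in process.paths:
    if not path in ['generation_step']: continue
    getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq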
    if self._options.runUnscheduled:
        self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
        self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
        self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
        from FWCore.ParameterSet.Utilities import convertToUnscheduled
        self.process = convertToUnscheduled(self.process)

    # Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule
    if hasattr(self.process, "logErrorHarvester"):
        self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
        self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
        self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
        from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
        self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)

    # Add early deletion of temporary data products to reduce peak memory need
    self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
    self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
    self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
    from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
    self.process = customiseEarlyDelete(self.process)
    # write a summary of the job input/output if requested
    if self._options.io:
        ioJson = {}
        if not self._options.io.endswith('.io'): self._options.io += '.io'
        io = open(self._options.io, 'w')
        if hasattr(self.process.source, "fileNames"):
            if len(self.process.source.fileNames.value()):
                ioJson['primary'] = self.process.source.fileNames.value()
        if hasattr(self.process.source, "secondaryFileNames"):
            if len(self.process.source.secondaryFileNames.value()):
                ioJson['secondary'] = self.process.source.secondaryFileNames.value()
        if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
            ioJson['pileup'] = self._options.pileup_input[4:]
        for (o,om) in self.process.outputModules_().items():
            ioJson[o] = om.fileName.value()
        ioJson['GT'] = self.process.GlobalTag.globaltag.value()
        if self.productionFilterSequence:
            ioJson['filter'] = self.productionFilterSequence
        import json
        io.write(json.dumps(ioJson))
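# Illustrative content of the .io summary written above (every value below is a made-up example):
ioJson = {
    'primary': ['file:step2.root'],
    'pileup': '/MinBias/Example-v1/GEN-SIM',
    'RECOSIMoutput': 'step3.root',
    'GT': 'auto:phase1_2018_realistic',
}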
def load(self, includeFile)
def filesFromOption(self)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def doNotInlineEventContent(instance, label="cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)")
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")