3 __version__ =
"$Revision: 1.19 $" 4 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 6 import FWCore.ParameterSet.Config
as cms
7 from FWCore.ParameterSet.Modules
import _Module
11 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
16 from subprocess
import Popen,PIPE
17 import FWCore.ParameterSet.DictTypes
as DictTypes
# Default values for every cmsDriver.py option; each attribute mirrors one
# command-line switch. `defaultOptions` itself is created earlier in the file
# (that line is missing from this corrupted chunk).
# NOTE(review): this span was reconstructed from a garbled extraction in which
# statements were wrapped mid-expression; values are exactly those visible in
# the original text.
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
# Import kept in-place, exactly where the original file performed it.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey, VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
# NOTE(review): the source chunk is truncated here — a few option defaults
# (original lines 86-88) are missing and were NOT reconstructed.
90 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
91 return "process."+name+
" = " + theObject.dumpPython(
"process")
92 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
93 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 95 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 98 import FWCore.ParameterSet.Config
as cms
101 for line
in open(fileName,
'r'): 102 if line.count(
".root")>=2:
104 entries=line.replace(
"\n",
"").
split()
105 if not entries[0]
in prim:
106 prim.append(entries[0])
107 if not entries[1]
in sec:
108 sec.append(entries[1])
109 elif (line.find(
".root")!=-1):
110 entry=line.replace(
"\n",
"")
111 if not entry
in prim:
114 if not hasattr(s,
"fileNames"):
115 s.fileNames=cms.untracked.vstring(prim)
117 s.fileNames.extend(prim)
119 if not hasattr(s,
"secondaryFileNames"):
120 s.secondaryFileNames=cms.untracked.vstring(sec)
122 s.secondaryFileNames.extend(sec)
123 print "found files: ",prim
125 raise Exception(
"There are not files in input from the file list")
127 print "found parent files:",sec
132 import FWCore.ParameterSet.Config
as cms
135 print "the query is",query
138 while eC!=0
and count<3:
140 print 'Sleeping, then retrying DAS' 142 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True)
144 tupleP = os.waitpid(p.pid, 0)
148 print "DAS succeeded after",count,
"attempts",eC
150 print "DAS failed 3 times- I give up" 151 for line
in pipe.split(
'\n'):
152 if line.count(
".root")>=2:
154 entries=line.replace(
"\n",
"").
split()
155 if not entries[0]
in prim:
156 prim.append(entries[0])
157 if not entries[1]
in sec:
158 sec.append(entries[1])
159 elif (line.find(
".root")!=-1):
160 entry=line.replace(
"\n",
"")
161 if not entry
in prim:
164 if not hasattr(s,
"fileNames"):
165 s.fileNames=cms.untracked.vstring(prim)
167 s.fileNames.extend(prim)
169 if not hasattr(s,
"secondaryFileNames"):
170 s.secondaryFileNames=cms.untracked.vstring(sec)
172 s.secondaryFileNames.extend(sec)
173 print "found files: ",prim
175 print "found parent files:",sec
def anyOf(listOfKeys,dict,opt=None):
    """Return the value of the first key of listOfKeys present in dict.

    The matched key is popped from dict so that the caller can afterwards
    detect unused entries (the --output handling raises on leftover keys).
    If none of the keys is present, opt is returned when given, otherwise
    an Exception is raised naming the mandatory keys.

    NOTE(review): the middle of this function was missing from the corrupted
    source chunk; the pop-and-return behaviour is reconstructed from the
    visible call sites — confirm against the original file.
    """
    # 'dict' shadows the builtin; kept as-is to preserve the signature.
    for key in listOfKeys:
        if key in dict:
            value = dict[key]
            # consume the key so leftover-key detection works at the caller
            dict.pop(key)
            return value
    if opt is not None:
        return opt
    raise Exception(
        "any of "+
        ','.
        join(listOfKeys)+
        " are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
239 self.with_output = with_output
242 if hasattr(self.
_options,
"no_output_flag")
and self._options.no_output_flag:
243 self.with_output =
False 244 self.with_input = with_input
248 self.schedule =
list()
254 self.additionalCommands = []
256 self.blacklist_paths = []
257 self.addedObjects = []
258 self.additionalOutputs = {}
260 self.productionFilterSequence =
None 261 self.labelsToAssociate=[]
262 self.nextScheduleIsConditional=
False 263 self.conditionalPaths=[]
264 self.excludedPaths=[]
269 Function to add the igprof profile service so that you can dump in the middle 272 profileOpts = self._options.profile.split(
':')
274 profilerInterval = 100
275 profilerFormat =
None 276 profilerJobFormat =
None 282 startEvent = profileOpts.pop(0)
283 if not startEvent.isdigit():
284 raise Exception(
"%s is not a number" % startEvent)
285 profilerStart =
int(startEvent)
287 eventInterval = profileOpts.pop(0)
288 if not eventInterval.isdigit():
289 raise Exception(
"%s is not a number" % eventInterval)
290 profilerInterval =
int(eventInterval)
292 profilerFormat = profileOpts.pop(0)
295 if not profilerFormat:
296 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
298 self._options.pileup,
299 self._options.conditions,
300 self._options.datatier,
301 self._options.profileTypeLabel)
302 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
303 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
304 elif not profilerJobFormat:
305 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 307 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
310 includeFile = includeFile.replace(
'/',
'.')
311 self.process.load(includeFile)
312 return sys.modules[includeFile]
315 """helper routine to load and memorize imports""" 318 includeFile = includeFile.replace(
'/',
'.')
319 self.imports.append(includeFile)
320 self.process.load(includeFile)
321 return sys.modules[includeFile]
324 """helper routine to remember replace statements""" 325 self.additionalCommands.append(command)
326 if not command.strip().startswith(
"#"):
329 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
333 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
334 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
336 self.process.options = cms.untracked.PSet( )
338 self.addedObjects.append((
"",
"options"))
340 if self._options.lazy_download:
341 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
342 stats = cms.untracked.bool(
True),
343 enable = cms.untracked.bool(
True),
344 cacheHint = cms.untracked.string(
"lazy-download"),
345 readHint = cms.untracked.string(
"read-ahead-buffered")
347 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
352 if self._options.profile:
354 self.process.IgProfService = cms.Service(
"IgProfService",
355 reportFirstEvent = cms.untracked.int32(start),
356 reportEventInterval = cms.untracked.int32(interval),
357 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
358 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
359 self.addedObjects.append((
"Setup IGProf Service for profiling",
"IgProfService"))
362 """Here we decide how many evts will be processed""" 363 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
364 if self._options.number_out:
365 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
366 self.addedObjects.append((
"",
"maxEvents"))
369 """Here the source is built. Priority: file, generator""" 370 self.addedObjects.append((
"Input source",
"source"))
373 for entry
in self._options.filein.split(
','):
375 if entry.startswith(
"filelist:"):
377 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
378 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
380 self.process.source.fileNames.append(self._options.dirin+entry)
381 if self._options.secondfilein:
382 if not hasattr(self.process.source,
"secondaryFileNames"):
383 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
384 for entry
in self._options.secondfilein.split(
','):
386 if entry.startswith(
"filelist:"):
387 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
388 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
389 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
391 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
393 if self._options.filein
or self._options.dasquery:
394 if self._options.filetype ==
"EDM":
395 self.process.source=cms.Source(
"PoolSource",
396 fileNames = cms.untracked.vstring(),
397 secondaryFileNames= cms.untracked.vstring())
399 elif self._options.filetype ==
"DAT":
400 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
402 elif self._options.filetype ==
"LHE":
403 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
404 if self._options.filein.startswith(
"lhe:"):
406 args=self._options.filein.split(
':')
408 print 'LHE input from article ',article
409 location=
'/store/lhe/' 411 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
412 for line
in textOfFiles:
413 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
414 self.process.source.fileNames.append(location+article+
'/'+fileName)
417 print 'Issue to load LHE files, please check and try again.' 420 if len(self.process.source.fileNames)==0:
421 print 'Issue with empty filename, but can pass line check' 424 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
428 elif self._options.filetype ==
"DQM":
429 self.process.source=cms.Source(
"DQMRootSource",
430 fileNames = cms.untracked.vstring())
433 elif self._options.filetype ==
"DQMDAQ":
435 self.process.source=cms.Source(
"DQMStreamerReader")
438 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
439 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
441 if self._options.dasquery!=
'':
442 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
443 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
445 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
446 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
449 if 'GEN' in self.stepMap.keys():
450 if self._options.inputCommands:
451 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 453 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 455 if self.process.source
and self._options.inputCommands:
456 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
457 for command
in self._options.inputCommands.split(
','):
459 command = command.strip()
460 if command==
'':
continue 461 self.process.source.inputCommands.append(command)
462 if not self._options.dropDescendant:
463 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
465 if self._options.lumiToProcess:
466 import FWCore.PythonUtilities.LumiList
as LumiList
467 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
469 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.stepMap
or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
470 if self.process.source
is None:
471 self.process.source=cms.Source(
"EmptySource")
474 self.runsAndWeights=
None 475 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
476 if not self._options.isMC :
477 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
478 if self._options.runsAndWeightsForMC:
479 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
481 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
482 if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
483 __import__(RunsAndWeights[self._options.runsScenarioForMC])
484 self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
486 self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
488 if self.runsAndWeights:
489 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
491 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
492 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
497 """ Add output module to the process """ 499 if self._options.outputDefinition:
500 if self._options.datatier:
501 print "--datatier & --eventcontent options ignored" 504 outList = eval(self._options.outputDefinition)
505 for (id,outDefDict)
in enumerate(outList):
506 outDefDictStr=outDefDict.__str__()
507 if not isinstance(outDefDict,dict):
508 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
510 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
513 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
514 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
515 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
516 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
517 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
519 if not theModuleLabel:
520 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
521 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 524 for name
in tryNames:
525 if not hasattr(self.process,name):
528 if not theModuleLabel:
529 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
531 defaultFileName=self._options.outfile_name
533 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
535 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
536 if not theFileName.endswith(
'.root'):
539 if len(outDefDict.keys()):
540 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
541 if theStreamType==
'DQMIO': theStreamType=
'DQM' 542 if theStreamType==
'ALL':
543 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
545 theEventContent = getattr(self.process, theStreamType+
"EventContent")
549 if theStreamType==
'ALCARECO' and not theFilterName:
550 theFilterName=
'StreamALCACombined' 553 CppType=
'PoolOutputModule' 554 if self._options.timeoutOutput:
555 CppType=
'TimeoutPoolOutputModule' 556 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 557 output = cms.OutputModule(CppType,
558 theEventContent.clone(),
559 fileName = cms.untracked.string(theFileName),
560 dataset = cms.untracked.PSet(
561 dataTier = cms.untracked.string(theTier),
562 filterName = cms.untracked.string(theFilterName))
564 if not theSelectEvent
and hasattr(self.process,
'generation_step')
and theStreamType!=
'LHE':
565 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
566 if not theSelectEvent
and hasattr(self.process,
'filtering_step'):
567 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
569 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
572 if not hasattr(output,
'SelectEvents'):
573 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
574 for alca
in self.AlCaPaths:
575 output.SelectEvents.SelectEvents.extend(getattr(self.process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
578 if hasattr(self.process,theModuleLabel):
579 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
581 setattr(self.process,theModuleLabel,output)
582 outputModule=getattr(self.process,theModuleLabel)
583 setattr(self.process,theModuleLabel+
'_step',cms.EndPath(outputModule))
584 path=getattr(self.process,theModuleLabel+
'_step')
585 self.schedule.append(path)
587 if not self._options.inlineEventContent
and hasattr(self.process,theStreamType+
"EventContent"):
590 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
591 if theExtraOutputCommands:
592 if not isinstance(theExtraOutputCommands,list):
593 raise Exception(
"extra ouput command in --option must be a list of strings")
594 if hasattr(self.process,theStreamType+
"EventContent"):
595 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
597 outputModule.outputCommands.extend(theExtraOutputCommands)
599 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
604 streamTypes=self._options.eventcontent.split(
',')
605 tiers=self._options.datatier.split(
',')
606 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
607 raise Exception(
"number of event content arguments does not match number of datatier arguments")
610 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
613 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
614 if streamType==
'':
continue 615 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue 616 if streamType==
'DQMIO': streamType=
'DQM' 617 eventContent=streamType
619 if streamType ==
"NANOEDMAOD" :
620 eventContent =
"NANOAOD" 621 elif streamType ==
"NANOEDMAODSIM" :
622 eventContent =
"NANOAODSIM" 623 theEventContent = getattr(self.process, eventContent+
"EventContent")
625 theFileName=self._options.outfile_name
626 theFilterName=self._options.filtername
628 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
629 theFilterName=self._options.filtername
630 CppType=
'PoolOutputModule' 631 if self._options.timeoutOutput:
632 CppType=
'TimeoutPoolOutputModule' 633 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 634 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 635 output = cms.OutputModule(CppType,
637 fileName = cms.untracked.string(theFileName),
638 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
639 filterName = cms.untracked.string(theFilterName)
642 if hasattr(self.process,
"generation_step")
and streamType!=
'LHE':
643 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
644 if hasattr(self.process,
"filtering_step"):
645 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
647 if streamType==
'ALCARECO':
648 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
650 if "MINIAOD" in streamType:
651 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
654 outputModuleName=streamType+
'output' 655 setattr(self.process,outputModuleName,output)
656 outputModule=getattr(self.process,outputModuleName)
657 setattr(self.process,outputModuleName+
'_step',cms.EndPath(outputModule))
658 path=getattr(self.process,outputModuleName+
'_step')
659 self.schedule.append(path)
661 if self._options.outputCommands
and streamType!=
'DQM':
662 for evct
in self._options.outputCommands.split(
','):
663 if not evct:
continue 664 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
666 if not self._options.inlineEventContent:
667 tmpstreamType=streamType
668 if "NANOEDM" in tmpstreamType :
669 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
672 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
674 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
680 Add selected standard sequences to the process 683 if self._options.pileup:
684 pileupSpec=self._options.pileup.split(
',')[0]
687 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
688 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
689 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
693 if '.' in pileupSpec:
694 mixingDict={
'file':pileupSpec}
695 elif pileupSpec.startswith(
'file:'):
696 mixingDict={
'file':pileupSpec[5:]}
699 mixingDict=copy.copy(Mixing[pileupSpec])
700 if len(self._options.pileup.split(
','))>1:
701 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(
',')+1:]))
704 if 'file:' in pileupSpec:
706 self.process.load(mixingDict[
'file'])
707 print "inlining mixing module configuration" 708 self._options.inlineObjets+=
',mix' 710 self.loadAndRemember(mixingDict[
'file'])
712 mixingDict.pop(
'file')
713 if not "DATAMIX" in self.stepMap.keys():
714 if self._options.pileup_input:
715 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
716 mixingDict[
'F']=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
717 elif self._options.pileup_input.startswith(
"filelist:"):
718 mixingDict[
'F']=(
filesFromList(self._options.pileup_input[9:]))[0]
720 mixingDict[
'F']=self._options.pileup_input.split(
',')
722 for command
in specialization:
723 self.executeAndRemember(command)
724 if len(mixingDict)!=0:
725 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
730 if len(self.stepMap):
731 self.loadAndRemember(self.GeometryCFF)
732 if (
'SIM' in self.stepMap
or 'reSIM' in self.stepMap)
and not self._options.fast:
733 self.loadAndRemember(self.SimGeometryCFF)
734 if self.geometryDBLabel:
735 self.executeAndRemember(
'process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
737 print "Geometry option",self._options.geometry,
"unknown." 740 if len(self.stepMap):
741 self.loadAndRemember(self.magFieldCFF)
743 for stepName
in self.stepKeys:
744 stepSpec = self.stepMap[stepName]
745 print "Step:", stepName,
"Spec:",stepSpec
746 if stepName.startswith(
're'):
748 if stepName[2:]
not in self._options.donotDropOnInput:
749 self._options.inputEventContent=
'%s,%s'%(stepName.upper(),self._options.inputEventContent)
750 stepName=stepName[2:]
752 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
753 elif type(stepSpec)==list:
754 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
755 elif type(stepSpec)==tuple:
756 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
758 raise ValueError(
"Invalid step definition")
760 if self._options.restoreRNDSeeds!=
False:
762 if self._options.restoreRNDSeeds==
True:
763 self.executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
765 self.executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
766 if self._options.inputEventContent
or self._options.inputCommands:
767 if self._options.inputCommands:
768 self._options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 770 self._options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 774 if self._options.inputEventContent:
776 def dropSecondDropStar(iec):
787 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
788 for evct
in self._options.inputEventContent.split(
','):
789 if evct==
'':
continue 790 theEventContent = getattr(self.process, evct+
"EventContent")
791 if hasattr(theEventContent,
'outputCommands'):
792 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
793 if hasattr(theEventContent,
'inputCommands'):
794 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
796 dropSecondDropStar(self.process.source.inputCommands)
798 if not self._options.dropDescendant:
799 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
805 """Add conditions to the process""" 806 if not self._options.conditions:
return 808 if 'FrontierConditions_GlobalTag' in self._options.conditions:
809 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 810 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
812 self.loadAndRemember(self.ConditionsDefaultCFF)
814 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
815 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
816 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
820 """Include the customise code """ 824 for c
in self._options.customisation_file:
825 custOpt.extend(c.split(
","))
827 for c
in self._options.customisation_file_unsch:
828 custOpt.extend(c.split(
","))
834 raise Exception(
"more than . in the specification:"+opt)
835 fileName=opt.split(
'.')[0]
836 if opt.count(
'.')==0: rest=
'customise' 838 rest=opt.split(
'.')[1]
839 if rest==
'py': rest=
'customise' 841 if fileName
in custMap:
842 custMap[fileName].extend(rest.split(
'+'))
844 custMap[fileName]=rest.split(
'+')
849 final_snippet=
'\n# customisation of the process.\n' 853 allFcn.extend(custMap[opt])
855 if allFcn.count(fcn)!=1:
856 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
860 packageName = f.replace(
".py",
"").
replace(
"/",
".")
861 __import__(packageName)
862 package = sys.modules[packageName]
865 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
867 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 868 if self._options.inline_custom:
869 for line
in file(customiseFile,
'r'): 870 if "import FWCore.ParameterSet.Config" in line:
872 final_snippet += line
874 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
875 for fcn
in custMap[f]:
876 print "customising the process with",fcn,
"from",f
877 if not hasattr(package,fcn):
879 raise Exception(
"config "+f+
" has no function "+fcn)
881 self.process=getattr(package,fcn)(self.process)
883 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
884 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
887 final_snippet +=
'\n# End of customisation functions\n' 893 final_snippet=
'\n# Customisation from command line\n' 894 if self._options.customise_commands:
896 for com
in self._options.customise_commands.split(
'\\n'):
897 com=string.lstrip(com)
899 final_snippet +=
'\n'+com
908 if len(self.stepMap):
910 if self._options.particleTable
not in defaultOptions.particleTableList:
911 print 'Invalid particle table provided. Options are:' 912 print defaultOptions.particleTable
915 if len(self.stepMap):
916 self.
loadAndRemember(
'SimGeneral.HepPDTESSource.'+self._options.particleTable+
'_cfi')
935 self.EIDefaultCFF=
None 936 self.SKIMDefaultCFF=
"Configuration/StandardSequences/Skims_cff" 937 self.POSTRECODefaultCFF=
"Configuration/StandardSequences/PostRecoGenerator_cff" 938 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/Validation_cff" 939 self.L1HwValDefaultCFF =
"Configuration/StandardSequences/L1HwVal_cff" 940 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOffline_cff" 941 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/Harvesting_cff" 942 self.ALCAHARVESTDefaultCFF=
"Configuration/StandardSequences/AlCaHarvesting_cff" 943 self.ENDJOBDefaultCFF=
"Configuration/StandardSequences/EndOfProcess_cff" 944 self.ConditionsDefaultCFF =
"Configuration/StandardSequences/FrontierConditions_GlobalTag_cff" 945 self.CFWRITERDefaultCFF =
"Configuration/StandardSequences/CrossingFrameWriter_cff" 946 self.REPACKDefaultCFF=
"Configuration/StandardSequences/DigiToRaw_Repack_cff" 948 if "DATAMIX" in self.stepMap.keys():
949 self.DATAMIXDefaultCFF=
"Configuration/StandardSequences/DataMixer"+self._options.datamix+
"_cff" 950 if self._options.datamix ==
'PreMix':
951 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiDMPreMix_cff" 955 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 957 if "DIGIPREMIX" in self.stepMap.keys():
958 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/Digi_PreMix_cff" 960 self.
L1EMDefaultCFF=
"Configuration/StandardSequences/SimL1EmulatorPreMix_cff" 962 self.ALCADefaultSeq=
None 963 self.LHEDefaultSeq=
'externalLHEProducer' 964 self.GENDefaultSeq=
'pgen' 965 self.SIMDefaultSeq=
'psim' 966 self.DIGIDefaultSeq=
'pdigi' 967 self.DIGIPREMIXDefaultSeq=
'pdigi' 968 self.DIGIPREMIX_S2DefaultSeq=
'pdigi' 969 self.DATAMIXDefaultSeq=
None 970 self.DIGI2RAWDefaultSeq=
'DigiToRaw' 971 self.HLTDefaultSeq=
'GRun' 972 self.L1DefaultSeq=
None 973 self.L1REPACKDefaultSeq=
'GT' 974 self.HARVESTINGDefaultSeq=
None 975 self.ALCAHARVESTDefaultSeq=
None 976 self.CFWRITERDefaultSeq=
None 977 self.RAW2DIGIDefaultSeq=
'RawToDigi' 978 self.L1RecoDefaultSeq=
'L1Reco' 979 self.L1TrackTriggerDefaultSeq=
'L1TrackTrigger' 980 if self._options.fast
or (
'RAW2DIGI' in self.stepMap
and 'RECO' in self.stepMap):
981 self.RECODefaultSeq=
'reconstruction' 983 self.RECODefaultSeq=
'reconstruction_fromRECO' 984 self.RECOSIMDefaultSeq=
'recosim' 985 self.EIDefaultSeq=
'top' 986 self.POSTRECODefaultSeq=
None 987 self.L1HwValDefaultSeq=
'L1HwVal' 988 self.DQMDefaultSeq=
'DQMOffline' 989 self.VALIDATIONDefaultSeq=
'' 990 self.ENDJOBDefaultSeq=
'endOfProcess' 991 self.REPACKDefaultSeq=
'DigiToRawRepack' 992 self.PATDefaultSeq=
'miniAOD' 993 self.PATGENDefaultSeq=
'miniGEN' 994 self.NANODefaultSeq=
'nanoSequence' 996 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContent_cff" 998 if not self._options.beamspot:
999 self._options.beamspot=VtxSmearedDefaultKey
1002 if self._options.isMC==
True:
1004 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1005 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1006 self.PATGENDefaultCFF=
"Configuration/StandardSequences/PATGEN_cff" 1007 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineMC_cff" 1008 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1009 self.NANODefaultSeq=
'nanoSequenceMC' 1011 self._options.beamspot =
None 1014 if 'reGEN' in self.stepMap:
1015 self.GENDefaultSeq=
'fixGenInfo' 1017 if self._options.scenario==
'cosmics':
1018 self._options.pileup=
'Cosmics' 1019 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1020 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1021 self.SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1022 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContentCosmics_cff" 1023 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/ValidationCosmics_cff" 1024 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineCosmics_cff" 1025 if self._options.isMC==
True:
1026 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineCosmicsMC_cff" 1027 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/HarvestingCosmics_cff" 1028 self.RECODefaultSeq=
'reconstructionCosmics' 1029 self.DQMDefaultSeq=
'DQMOfflineCosmics' 1031 if self._options.scenario==
'HeavyIons':
1032 if not self._options.beamspot:
1033 self._options.beamspot=VtxSmearedHIDefaultKey
1034 self.HLTDefaultSeq =
'HIon' 1035 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/ValidationHeavyIons_cff" 1036 self.VALIDATIONDefaultSeq=
'' 1037 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContentHeavyIons_cff" 1038 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1039 self.RECODefaultSeq=
'reconstructionHeavyIons' 1040 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1041 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineHeavyIons_cff" 1042 self.DQMDefaultSeq=
'DQMOfflineHeavyIons' 1043 self.SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1044 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/HarvestingHeavyIons_cff" 1045 if self._options.isMC==
True:
1046 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff" 1049 self.RAW2RECODefaultSeq=
','.
join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1051 self.USERDefaultSeq=
'user' 1052 self.USERDefaultCFF=
None 1055 if self._options.isData:
1056 if self._options.magField==defaultOptions.magField:
1057 print "magnetic field option forced to: AutoFromDBCurrent" 1058 self._options.magField=
'AutoFromDBCurrent' 1059 self.magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace(
'.',
'')+
'_cff' 1060 self.magFieldCFF = self.magFieldCFF.replace(
"__",
'_')
1063 self.GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1064 self.geometryDBLabel=
None 1066 if self._options.fast:
1067 if 'start' in self._options.conditions.lower():
1068 self.GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1070 self.GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1073 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1074 if opt
in GeometryConf:
1075 return GeometryConf[opt]
1079 geoms=self._options.geometry.split(
',')
1083 if '/' in geoms[1]
or '_cff' in geoms[1]:
1084 self.GeometryCFF=geoms[1]
1086 self.GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1088 if (geoms[0].startswith(
'DB:')):
1089 self.SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1090 self.geometryDBLabel=geoms[0][3:]
1093 if '/' in geoms[0]
or '_cff' in geoms[0]:
1094 self.SimGeometryCFF=geoms[0]
1096 simGeometry=geoms[0]
1097 if self._options.gflash==
True:
1098 self.SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1100 self.SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1103 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1104 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1106 if self._options.scenario==
'nocoll' or self._options.scenario==
'cosmics':
1107 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1108 self._options.beamspot=
'NoSmear' 1111 if self._options.fast:
1112 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1113 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1114 self.RECOBEFMIXDefaultCFF =
'FastSimulation.Configuration.Reconstruction_BefMix_cff' 1115 self.RECOBEFMIXDefaultSeq =
'reconstruction_befmix' 1116 self.DQMOFFLINEDefaultCFF=
"FastSimulation.Configuration.DQMOfflineMC_cff" 1119 if self._options.pileup==
'default':
1120 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1121 self._options.pileup=MixingDefaultKey
1125 if self._options.isData:
1126 self._options.pileup=
None 1129 self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1134 output = cms.OutputModule(
"PoolOutputModule")
1135 if stream.selectEvents.parameters_().__len__()!=0:
1136 output.SelectEvents = stream.selectEvents
1138 output.SelectEvents = cms.untracked.PSet()
1139 output.SelectEvents.SelectEvents=cms.vstring()
1140 if isinstance(stream.paths,tuple):
1141 for path
in stream.paths:
1142 output.SelectEvents.SelectEvents.append(path.label())
1144 output.SelectEvents.SelectEvents.append(stream.paths.label())
1148 if isinstance(stream.content,str):
1149 evtPset=getattr(self.process,stream.content)
1150 for p
in evtPset.parameters_():
1151 setattr(output,p,getattr(evtPset,p))
1152 if not self._options.inlineEventContent:
1155 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1157 output.outputCommands = stream.content
1160 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1162 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1163 filterName = cms.untracked.string(stream.name))
1165 if self._options.filtername:
1166 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1169 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1171 if workflow
in (
"producers,full"):
1172 if isinstance(stream.paths,tuple):
1173 for path
in stream.paths:
1174 self.schedule.append(path)
1176 self.schedule.append(stream.paths)
1180 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1181 self.additionalOutputs[name] = output
1182 setattr(self.process,name,output)
1184 if workflow ==
'output':
1186 filterList = output.SelectEvents.SelectEvents
1187 for i, filter
in enumerate(filterList):
1188 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1198 if ( len(sequence.split(
'.'))==1 ):
1200 elif ( len(sequence.split(
'.'))==2 ):
1202 sequence=sequence.split(
'.')[1]
1204 print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a" 1205 print sequence,
"not recognized" 1212 for i,s
in enumerate(seq.split(
'*')):
1214 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1216 p=getattr(self.process,prefix)
1217 p+=getattr(self.process, s)
1218 self.schedule.append(getattr(self.process,prefix))
1223 if self.nextScheduleIsConditional:
1224 self.conditionalPaths.append(prefix)
1225 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1226 self.schedule.append(getattr(self.process,prefix))
1228 for i,s
in enumerate(seq.split(
'+')):
1230 setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1231 self.schedule.append(getattr(self.process,sn))
1245 """ Enrich the process with alca streams """ 1247 sequence = sequence.split(
'.')[-1]
1250 alcaList = sequence.split(
"+")
1252 from Configuration.AlCa.autoAlca
import autoAlca
1256 for name
in alcaConfig.__dict__:
1257 alcastream = getattr(alcaConfig,name)
1258 shortName = name.replace(
'ALCARECOStream',
'')
1259 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1260 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1261 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1262 self.AlCaPaths.append(shortName)
1263 if 'DQM' in alcaList:
1264 if not self._options.inlineEventContent
and hasattr(self.process,name):
1265 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1267 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1270 if self._options.hltProcess
or 'HLT' in self.stepMap:
1271 if isinstance(alcastream.paths,tuple):
1272 for path
in alcastream.paths:
1277 for i
in range(alcaList.count(shortName)):
1278 alcaList.remove(shortName)
1281 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1282 path = getattr(alcaConfig,name)
1283 self.schedule.append(path)
1284 alcaList.remove(
'DQM')
1286 if isinstance(alcastream,cms.Path):
1288 self.blacklist_paths.append(alcastream)
1291 if len(alcaList) != 0:
1293 for name
in alcaConfig.__dict__:
1294 alcastream = getattr(alcaConfig,name)
1295 if isinstance(alcastream,cms.FilteredStream):
1296 available.append(name.replace(
'ALCARECOStream',
''))
1297 print "The following alcas could not be found "+
str(alcaList)
1298 print "available ",available
1300 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1305 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1306 print "Loading lhe fragment from",loadFragment
1307 __import__(loadFragment)
1308 self.process.load(loadFragment)
1310 self._options.inlineObjets+=
','+sequence
1312 getattr(self.process,sequence).nEvents =
int(self._options.number)
1315 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1316 self.excludedPaths.append(
"lhe_step")
1317 self.schedule.append( self.process.lhe_step )
1320 """ load the fragment of generator configuration """ 1325 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1327 if not '/' in loadFragment:
1328 loadFragment=
'Configuration.Generator.'+loadFragment
1330 loadFragment=loadFragment.replace(
'/',
'.')
1332 print "Loading generator fragment from",loadFragment
1333 __import__(loadFragment)
1337 if not (self._options.filein
or self._options.dasquery):
1338 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1341 generatorModule=sys.modules[loadFragment]
1342 genModules=generatorModule.__dict__
1345 if self.LHEDefaultSeq
in genModules:
1346 del genModules[self.LHEDefaultSeq]
1348 if self._options.hideGen:
1349 self.loadAndRemember(loadFragment)
1351 self.process.load(loadFragment)
1353 import FWCore.ParameterSet.Modules
as cmstypes
1354 for name
in genModules:
1355 theObject = getattr(generatorModule,name)
1356 if isinstance(theObject, cmstypes._Module):
1357 self._options.inlineObjets=name+
','+self._options.inlineObjets
1358 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1359 self._options.inlineObjets+=
','+name
1361 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1362 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1363 self.productionFilterSequence =
'ProductionFilterSequence' 1364 elif 'generator' in genModules:
1365 self.productionFilterSequence =
'generator' 1367 """ Enrich the schedule with the rest of the generation step """ 1368 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1369 genSeqName=sequence.split(
'.')[-1]
1373 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1374 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1375 self.loadAndRemember(cffToBeLoaded)
1377 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1379 if self._options.scenario ==
'HeavyIons':
1380 if self._options.pileup==
'HiMixGEN':
1381 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1383 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1385 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1386 self.schedule.append(self.process.generation_step)
1389 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1391 if 'reGEN' in self.stepMap:
1395 """ Enrich the schedule with the summary of the filter step """ 1397 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1398 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1402 """ Enrich the schedule with the simulation step""" 1403 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1404 if not self._options.fast:
1405 if self._options.gflash==
True:
1406 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1408 if self._options.magField==
'0T':
1409 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1411 if self._options.magField==
'0T':
1412 self.executeAndRemember(
"process.famosSimHits.UseMagneticField = cms.bool(False)")
1414 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1418 """ Enrich the schedule with the digitisation step""" 1421 if self._options.gflash==
True:
1422 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1424 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1425 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1427 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1428 if self._options.inputEventContent==
'':
1429 self._options.inputEventContent=
'REGEN' 1431 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1438 """ Enrich the schedule with the digitisation step""" 1443 if sequence ==
'pdigi_valid':
1444 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1446 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1452 """ Enrich the schedule with the digitisation step""" 1453 self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1455 self.loadAndRemember(
"SimGeneral/MixingModule/digi_MixPreMix_cfi")
1458 if sequence ==
'pdigi_valid':
1459 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1461 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1463 self.scheduleSequence(sequence.split(
'.')[-1],
'digitisation_step')
1467 """ Enrich the schedule with the crossing frame writer step""" 1473 """ Enrich the schedule with the digitisation step""" 1477 if self._options.pileup_input:
1479 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1480 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1481 elif self._options.pileup_input.startswith(
"filelist:"):
1482 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1484 theFiles=self._options.pileup_input.split(
',')
1486 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1493 if "DIGIPREMIX" in self.stepMap.keys():
1495 self.
executeAndRemember(
"process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
1505 """ Enrich the schedule with the L1 simulation step""" 1506 assert(sequence ==
None)
1512 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1513 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT']
1514 if sequence
in supported:
1515 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1516 if self._options.scenario ==
'HeavyIons':
1517 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1518 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1520 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1525 """ Enrich the schedule with the HLT simulation step""" 1527 print "no specification of the hlt menu has been given, should never happen" 1528 raise Exception(
'no HLT sequence provided')
1532 from Configuration.HLT.autoHLT
import autoHLT
1535 sequence = autoHLT[key]
1537 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1543 if self._options.scenario ==
'HeavyIons':
1544 optionsForHLT[
'type'] =
'HIon' 1546 optionsForHLT[
'type'] =
'GRun' 1547 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1548 if sequence ==
'run,fromSource':
1549 if hasattr(self.process.source,
'firstRun'):
1550 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1551 elif hasattr(self.process.source,
'setRunNumber'):
1552 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1554 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1556 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1560 if self._options.isMC:
1561 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1563 if self._options.name !=
'HLT':
1564 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1565 self.additionalCommands.append(
'process = ProcessName(process)')
1566 self.additionalCommands.append(
'')
1567 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1570 self.schedule.append(self.process.HLTSchedule)
1571 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1574 if self._options.fast:
1575 if not hasattr(self.process,
'HLTEndSequence'):
1576 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1581 seqReco=sequence.split(
',')[1]
1582 seqDigi=sequence.split(
',')[0]
1584 print "RAW2RECO requires two specifications",sequence,
"insufficient" 1598 self.
loadAndRemember(
"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1600 for filt
in allMetFilterPaths:
1601 self.schedule.append(getattr(self.process,
'Flag_'+filt))
1604 ''' Enrich the schedule with L1 HW validation ''' 1607 print '\n\n\n DEPRECATED this has no action \n\n\n' 1611 ''' Enrich the schedule with L1 reconstruction ''' 1617 ''' Enrich the schedule with L1 reconstruction ''' 1623 ''' Enrich the schedule with a user defined filter sequence ''' 1625 filterConfig=self.load(sequence.split(
'.')[0])
1626 filterSeq=sequence.split(
'.')[-1]
1634 label=visitee.label()
1642 getattr(self.process,filterSeq).
visit( expander )
1643 self._options.inlineObjets+=
','+expander.inliner
1644 self._options.inlineObjets+=
','+filterSeq
1647 self.scheduleSequence(filterSeq,
'filtering_step')
1648 self.nextScheduleIsConditional=
True 1650 self.productionFilterSequence = filterSeq
1655 ''' Enrich the schedule with reconstruction ''' 1661 ''' Enrich the schedule with reconstruction ''' 1667 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1668 if not self._options.fast:
1669 print "ERROR: this step is only implemented for FastSim" 1672 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1676 ''' Enrich the schedule with PAT ''' 1679 self.labelsToAssociate.append(
'patTask')
1680 if not self._options.runUnscheduled:
1681 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1682 if self._options.isData:
1683 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1685 if self._options.fast:
1686 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1688 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1690 if self._options.hltProcess:
1691 if len(self._options.customise_commands) > 1:
1692 self._options.customise_commands = self._options.customise_commands +
" \n" 1693 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1694 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1701 ''' Enrich the schedule with PATGEN ''' 1703 self.labelsToAssociate.append(
'patGENTask')
1704 if not self._options.runUnscheduled:
1705 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1706 if self._options.isData:
1707 raise Exception(
"PATGEN step can only run on MC")
1711 ''' Enrich the schedule with NANO ''' 1714 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1715 if self._options.runUnscheduled:
1716 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1718 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1719 if self._options.hltProcess:
1720 if len(self._options.customise_commands) > 1:
1721 self._options.customise_commands = self._options.customise_commands +
" \n" 1722 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1726 ''' Enrich the schedule with event interpretation ''' 1727 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1728 if sequence
in EventInterpretation:
1729 self.EIDefaultCFF = EventInterpretation[sequence]
1730 sequence =
'EIsequence' 1732 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1733 self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1734 self.scheduleSequence(sequence.split(
'.')[-1],
'eventinterpretaion_step')
1738 ''' Enrich the schedule with skimming fragments''' 1740 sequence = sequence.split(
'.')[-1]
1742 skimlist=sequence.split(
'+')
1744 from Configuration.Skimming.autoSkim
import autoSkim
1748 for skim
in skimConfig.__dict__:
1749 skimstream = getattr(skimConfig,skim)
1750 if isinstance(skimstream,cms.Path):
1752 self.blacklist_paths.append(skimstream)
1753 if (
not isinstance(skimstream,cms.FilteredStream)):
1755 shortname = skim.replace(
'SKIMStream',
'')
1756 if (sequence==
"all"):
1758 elif (shortname
in skimlist):
1761 if self._options.datatier==
'DQM':
1762 self.process.load(self.EVTCONTDefaultCFF)
1763 skimstreamDQM = cms.FilteredStream(
1764 responsible = skimstream.responsible,
1765 name = skimstream.name+
'DQM',
1766 paths = skimstream.paths,
1767 selectEvents = skimstream.selectEvents,
1768 content = self._options.datatier+
'EventContent',
1769 dataTier = cms.untracked.string(self._options.datatier)
1772 for i
in range(skimlist.count(shortname)):
1773 skimlist.remove(shortname)
1777 if (skimlist.__len__()!=0
and sequence!=
"all"):
1778 print 'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist)
1779 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1782 ''' Enrich the schedule with a user defined sequence ''' 1788 """ Enrich the schedule with the postreco step """ 1795 print sequence,
"in preparing validation" 1797 from Validation.Configuration.autoValidation
import autoValidation
1799 sequence=sequence.split(
'.')[-1]
1800 if sequence.find(
',')!=-1:
1801 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1802 valSeqName=sequence.split(
',')[1].
split(
'+')
1807 prevalSeqName=sequence.split(
'+')
1808 valSeqName=sequence.split(
'+')
1814 postfix=
'_'+sequence
1815 prevalSeqName=[
'prevalidation'+postfix]
1816 valSeqName=[
'validation'+postfix]
1817 if not hasattr(self.process,valSeqName[0]):
1819 valSeqName=[sequence]
1830 if (
'HLT' in self.stepMap
and not self._options.fast)
or self._options.hltProcess:
1831 for s
in valSeqName+prevalSeqName:
1834 for (i,s)
in enumerate(prevalSeqName):
1836 setattr(self.process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
1837 self.schedule.append(getattr(self.process,
'prevalidation_step%s'%NFI(i)))
1839 for (i,s)
in enumerate(valSeqName):
1840 setattr(self.process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
1841 self.schedule.append(getattr(self.process,
'validation_step%s'%NFI(i)))
1844 if 'PAT' in self.stepMap
and not 'RECO' in self.stepMap:
1847 if not 'DIGI' in self.stepMap
and not self._options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1848 if self._options.restoreRNDSeeds==
False and not self._options.restoreRNDSeeds==
True:
1849 self._options.restoreRNDSeeds=
True 1851 if not 'DIGI' in self.stepMap
and not self._options.fast:
1855 self._options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1857 if hasattr(self.process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1859 for (i,s)
in enumerate(valSeqName):
1860 getattr(self.process,
'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,
'validation_step%s'%NFI(i))._seq
1866 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1867 It will climb down within PSets, VPSets and VInputTags to find its target""" 1868 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1875 if isinstance(pset, cms._Parameterizable):
1876 for name
in pset.parameters_().
keys():
1882 value = getattr(pset,name)
1883 type = value.pythonTypeName()
1884 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1885 self.
doIt(value,base+
"."+name)
1886 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1887 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1888 elif type
in (
'cms.string',
'cms.untracked.string'):
1890 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1892 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1893 for (i,n)
in enumerate(value):
1894 if not isinstance(n, cms.InputTag):
1898 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1901 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1902 for (i,n)
in enumerate(value):
1905 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1907 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1908 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1913 label = visitee.label()
1914 except AttributeError:
1915 label =
'<Module not in a Process>' 1917 label =
'other execption' 1918 self.
doIt(visitee, label)
1925 print "Replacing all InputTag %s => %s"%(oldT,newT)
1928 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1929 if not loadMe
in self.additionalCommands:
1930 self.additionalCommands.append(loadMe)
1931 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1935 if self._options.hltProcess:
1936 proc=self._options.hltProcess
1938 proc=self.process.name_()
1939 if proc==HLTprocess:
return 1941 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1943 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1944 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1945 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1951 while '@' in repr(seqList)
and level<maxLevel:
1953 for specifiedCommand
in seqList:
1954 if specifiedCommand.startswith(
'@'):
1955 location=specifiedCommand[1:]
1956 if not location
in mapping:
1957 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1958 mappedTo=mapping[location]
1960 mappedTo=mappedTo[index]
1961 seqList.remove(specifiedCommand)
1962 seqList.extend(mappedTo.split(
'+'))
1965 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1971 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1972 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1973 from DQMOffline.Configuration.autoDQM
import autoDQM
1977 if len(set(sequenceList))!=len(sequenceList):
1978 sequenceList=
list(set(sequenceList))
1979 print "Duplicate entries for DQM:, using",sequenceList
1981 pathName=
'dqmoffline_step' 1982 for (i,sequence)
in enumerate(sequenceList):
1984 pathName=
'dqmoffline_%d_step'%(i)
1986 if 'HLT' in self.stepMap.keys()
or self._options.hltProcess:
1989 setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
1990 self.schedule.append(getattr(self.process,pathName))
1992 if hasattr(self.process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1994 getattr(self.process,pathName).
insert(0,self.process.genstepfilter)
1996 pathName=
'dqmofflineOnPAT_step' 1997 for (i,sequence)
in enumerate(postSequenceList):
1999 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2001 setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
2002 self.schedule.append(getattr(self.process,pathName))
2005 """ Enrich the process with harvesting step """ 2006 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 2010 sequence = sequence.split(
'.')[-1]
2013 harvestingList = sequence.split(
"+")
2014 from DQMOffline.Configuration.autoDQM
import autoDQM
2015 from Validation.Configuration.autoValidation
import autoValidation
2017 combined_mapping = copy.deepcopy( autoDQM )
2018 combined_mapping.update( autoValidation )
2019 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2021 if len(set(harvestingList))!=len(harvestingList):
2022 harvestingList=
list(set(harvestingList))
2023 print "Duplicate entries for HARVESTING, using",harvestingList
2025 for name
in harvestingList:
2026 if not name
in harvestingConfig.__dict__:
2027 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
2029 harvestingstream = getattr(harvestingConfig,name)
2030 if isinstance(harvestingstream,cms.Path):
2031 self.schedule.append(harvestingstream)
2032 self.blacklist_paths.append(harvestingstream)
2033 if isinstance(harvestingstream,cms.Sequence):
2034 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
2035 self.schedule.append(getattr(self.process,name+
"_step"))
2041 """ Enrich the process with AlCaHarvesting step """ 2043 sequence=sequence.split(
".")[-1]
2046 harvestingList = sequence.split(
"+")
2050 from Configuration.AlCa.autoPCL
import autoPCL
2053 for name
in harvestingConfig.__dict__:
2054 harvestingstream = getattr(harvestingConfig,name)
2055 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2056 self.schedule.append(harvestingstream)
2057 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2058 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2059 harvestingList.remove(name)
2061 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2062 self.schedule.append(lastStep)
2064 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2065 print "The following harvesting could not be found : ", harvestingList
2066 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2076 self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2077 self.schedule.append(self.process.reconstruction)
2081 """ Add useful info for the production. """ 2082 self.process.configurationMetadata=cms.untracked.PSet\
2083 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2084 name=cms.untracked.string(
"Applications"),
2085 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2088 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2093 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2094 self.
pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2095 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2100 modifierImports=[
'from Configuration.StandardSequences.Eras import eras']
2102 if hasattr(self.
_options,
"era")
and self._options.era :
2104 from Configuration.StandardSequences.Eras
import eras
2105 for requestedEra
in self._options.era.split(
",") :
2106 modifierStrings.append(
"eras."+requestedEra)
2107 modifiers.append(getattr(eras,requestedEra))
2110 if hasattr(self.
_options,
"procModifiers")
and self._options.procModifiers:
2113 for pm
in self._options.procModifiers.split(
','):
2114 modifierStrings.append(pm)
2115 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2116 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2119 self.
pythonCfgCode +=
"process = cms.Process('"+self._options.name+
"'" 2122 if len(modifierStrings)>0:
2128 if self.process ==
None:
2129 if len(modifiers)>0:
2130 self.process = cms.Process(self._options.name,*modifiers)
2132 self.process = cms.Process(self._options.name)
2138 """ Prepare the configuration string and add missing pieces.""" 2150 outputModuleCfgCode=
"" 2151 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2156 self.
pythonCfgCode +=
"# import of standard configurations\n" 2157 for module
in self.imports:
2161 if not hasattr(self.process,
"configurationMetadata"):
2165 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2168 for comment,object
in self.addedObjects:
2180 nl=self.additionalOutputs.keys()
2183 output = self.additionalOutputs[name]
2184 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2185 tmpOut = cms.EndPath(output)
2186 setattr(self.process,name+
'OutPath',tmpOut)
2187 self.schedule.append(tmpOut)
2191 for command
in self.additionalCommands:
2195 for object
in self._options.inlineObjets.split(
','):
2198 if not hasattr(self.process,object):
2199 print 'cannot inline -'+object+
'- : not known' 2206 for path
in self.process.paths:
2207 if getattr(self.process,path)
not in self.blacklist_paths:
2210 for endpath
in self.process.endpaths:
2211 if getattr(self.process,endpath)
not in self.blacklist_paths:
2216 result =
"process.schedule = cms.Schedule(" 2219 self.process.schedule = cms.Schedule()
2220 for item
in self.schedule:
2221 if not isinstance(item, cms.Schedule):
2222 self.process.schedule.append(item)
2224 self.process.schedule.extend(item)
2226 if hasattr(self.process,
"HLTSchedule"):
2227 beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2228 afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2229 pathNames = [
'process.'+p.label_()
for p
in beforeHLT]
2230 result +=
','.
join(pathNames)+
')\n' 2231 result +=
'process.schedule.extend(process.HLTSchedule)\n' 2232 pathNames = [
'process.'+p.label_()
for p
in afterHLT]
2233 result +=
'process.schedule.extend(['+
','.
join(pathNames)+
'])\n' 2235 pathNames = [
'process.'+p.label_()
for p
in self.schedule]
2236 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2240 for labelToAssociate
in self.labelsToAssociate:
2241 self.process.schedule.associate(getattr(self.process, labelToAssociate))
2242 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2246 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2249 if self._options.nThreads
is not "1":
2252 self.
pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2253 self.
pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2255 if self._options.isRepacked:
2257 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2259 MassReplaceInputTag(self.process)
2262 if self.productionFilterSequence:
2263 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2265 if len(self.conditionalPaths):
2266 self.
pythonCfgCode +=
'\tif not path in %s: continue\n'%
str(self.conditionalPaths)
2267 if len(self.excludedPaths):
2269 self.
pythonCfgCode +=
'\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2270 pfs = getattr(self.process,self.productionFilterSequence)
2271 for path
in self.process.paths:
2272 if not path
in self.conditionalPaths:
continue 2273 if path
in self.excludedPaths:
continue 2274 getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2280 if self._options.runUnscheduled:
2283 self.
pythonCfgCode+=
"#do not add changes to your config after this point (unless you know what you are doing)\n" 2284 self.
pythonCfgCode+=
"from FWCore.ParameterSet.Utilities import convertToUnscheduled\n" 2285 self.
pythonCfgCode+=
"process=convertToUnscheduled(process)\n" 2287 from FWCore.ParameterSet.Utilities
import convertToUnscheduled
2294 if hasattr(self.process,
"logErrorHarvester"):
2296 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2297 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2298 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2299 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2306 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2307 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2308 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2310 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2316 if self._options.io:
2318 if not self._options.io.endswith(
'.io'): self._option.io+=
'.io' 2319 io=open(self._options.io,
'w')
2321 if hasattr(self.process.source,
"fileNames"):
2322 if len(self.process.source.fileNames.value()):
2323 ioJson[
'primary']=self.process.source.fileNames.value()
2324 if hasattr(self.process.source,
"secondaryFileNames"):
2325 if len(self.process.source.secondaryFileNames.value()):
2326 ioJson[
'secondary']=self.process.source.secondaryFileNames.value()
2327 if self._options.pileup_input
and (self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:')):
2328 ioJson[
'pileup']=self._options.pileup_input[4:]
2329 for (o,om)
in self.process.outputModules_().
items():
2330 ioJson[o]=om.fileName.value()
2331 ioJson[
'GT']=self.process.GlobalTag.globaltag.value()
2332 if self.productionFilterSequence:
2333 ioJson[
'filter']=self.productionFilterSequence
2335 io.write(json.dumps(ioJson))
def load(self, includeFile)
def filesFromOption(self)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def doNotInlineEventContent(instance, label="cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)")
event content
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_DIGIPREMIX(self, sequence=None)
def prepare_RECO(self, sequence="reconstruction")
put the filtering path in the schedule
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
def addCustomise(self, unsch=0)
def prepare_DIGIPREMIX_S2(self, sequence=None)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
now for a useful command
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
inliner
needs to be in reverse order
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
add the corresponding input content
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")