__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
# standard-library modules used throughout this file
import sys
import re
import os
import copy
from subprocess import Popen, PIPE
import FWCore.ParameterSet.DictTypes as DictTypes

# container for the canonical cmsDriver defaults
class Options:
    pass

defaultOptions = Options()
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'

def dumpPython(process, name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"
"\n" 98 import FWCore.ParameterSet.Config
as cms
101 for line
in open(fileName,
'r'): 102 if line.count(
".root")>=2:
104 entries=line.replace(
"\n",
"").
split()
105 if not entries[0]
in prim:
106 prim.append(entries[0])
107 if not entries[1]
in sec:
108 sec.append(entries[1])
109 elif (line.find(
".root")!=-1):
110 entry=line.replace(
"\n",
"")
111 if not entry
in prim:
114 if not hasattr(s,
"fileNames"):
115 s.fileNames=cms.untracked.vstring(prim)
117 s.fileNames.extend(prim)
119 if not hasattr(s,
"secondaryFileNames"):
120 s.secondaryFileNames=cms.untracked.vstring(sec)
122 s.secondaryFileNames.extend(sec)
123 print "found files: ",prim
125 raise Exception(
"There are not files in input from the file list")
127 print "found parent files:",sec

def filesFromDASQuery(query, option="", s=None):
    """Run a DAS query and attach the resulting primary/secondary file names to source s."""
    import os, time
    import FWCore.ParameterSet.Config as cms
    prim = []
    sec = []
    print "the query is",query
    eC = 5
    count = 0
    while eC!=0 and count<3:
        if count!=0:
            print 'Sleeping, then retrying DAS'
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            # two files on one line: primary and its parent (secondary) file
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
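
# Illustrative sketch (not from the original file): the helper above shells out to
# dasgoclient, so an equivalent manual invocation would look like
#
#   dasgoclient --query "file dataset = /RelValTTbar/.../GEN-SIM-DIGI-RAW"
#
# (dataset name hypothetical). Every line of the reply containing ".root" is
# treated as a logical file name and appended to the source.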

def anyOf(listOfKeys, dict, opt=None):
    # return (and pop) the first of the aliased keys present in dict, falling back to opt
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
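
        # Illustrative sketch (not from the original file): a --step string such as
        #   "RAW2DIGI,RECO:localreco+globalreco,DQM:dqm_cff:seq1+seq2"
        # (sequence names hypothetical) would be parsed into
        #   stepMap = {'RAW2DIGI': '',
        #              'RECO': ['localreco','globalreco'],
        #              'DQM': (['seq1','seq2'], 'dqm_cff')}
        # '' selects the step's default sequence, a list means '+'-joined sequences,
        # and the three-part form carries (sequences, explicit cff) as a tuple.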

        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.imports = []
        self.schedule = list()

        # bookkeeping of commands and objects needed to reproduce the process in the dumped python
        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.labelsToAssociate=[]
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
        self.excludedPaths=[]

    def profileOptions(self):
        """
        Function to add the igprof profile service so that you can dump in the middle
        of the run.
        """
        profileOpts = self._options.profile.split(':')
        profilerStart = 1
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        if len(profileOpts):
            # the profile type, given as the first field, is not needed here
            profileOpts.pop(0)
        if len(profileOpts):
            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)
        if len(profileOpts):
            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)
        if len(profileOpts):
            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       self._options.step,
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart, profilerInterval, profilerFormat, profilerJobFormat)

    def load(self, includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def executeAndRemember(self, command):
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            # rewrite "process." into "self.process." before executing the command here
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))

    def addCommon(self):
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            (start, interval, eventFormat, jobFormat)=self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent         = cms.untracked.int32(start),
                                                     reportEventInterval      = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent  = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

    def addMaxEvents(self):
        """Here we decide how many evts will be processed"""
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
394 if self._options.filein
or self._options.dasquery:
395 if self._options.filetype ==
"EDM":
396 self.process.source=cms.Source(
"PoolSource",
397 fileNames = cms.untracked.vstring(),
398 secondaryFileNames= cms.untracked.vstring())
400 elif self._options.filetype ==
"DAT":
401 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
403 elif self._options.filetype ==
"LHE":
404 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
405 if self._options.filein.startswith(
"lhe:"):
407 args=self._options.filein.split(
':')
409 print 'LHE input from article ',article
410 location=
'/store/lhe/' 412 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
413 for line
in textOfFiles:
414 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
415 self.process.source.fileNames.append(location+article+
'/'+fileName)
418 print 'Issue to load LHE files, please check and try again.' 421 if len(self.process.source.fileNames)==0:
422 print 'Issue with empty filename, but can pass line check' 425 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
429 elif self._options.filetype ==
"DQM":
430 self.process.source=cms.Source(
"DQMRootSource",
431 fileNames = cms.untracked.vstring())
434 elif self._options.filetype ==
"DQMDAQ":
436 self.process.source=cms.Source(
"DQMStreamerReader")
439 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
440 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
442 if self._options.dasquery!=
'':
443 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
444 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
446 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
447 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
450 if 'GEN' in self.stepMap.keys():
451 if self._options.inputCommands:
452 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 454 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 456 if self.process.source
and self._options.inputCommands:
457 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
458 for command
in self._options.inputCommands.split(
','):
460 command = command.strip()
461 if command==
'':
continue 462 self.process.source.inputCommands.append(command)
463 if not self._options.dropDescendant:
464 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
466 if self._options.lumiToProcess:
467 import FWCore.PythonUtilities.LumiList
as LumiList
468 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
470 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.stepMap
or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
471 if self.process.source
is None:
472 self.process.source=cms.Source(
"EmptySource")
475 self.runsAndWeights=
None 476 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
477 if not self._options.isMC :
478 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
479 if self._options.runsAndWeightsForMC:
480 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
482 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
483 if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
484 __import__(RunsAndWeights[self._options.runsScenarioForMC])
485 self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
487 self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
489 if self.runsAndWeights:
490 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
492 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
493 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
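
    # Illustrative sketch (not from the original file): --filein accepts several forms
    # that addSource() dispatches on, e.g. (values hypothetical)
    #   --filein das:/RelValTTbar/CMSSW_X_Y_Z-.../GEN-SIM   -> filesFromDASQuery()
    #   --filein filelist:myfiles.txt                       -> filesFromList()
    #   --filein lhe:12345                                  -> EOS LHE article listing
    #   --filein file:step1.root                            -> appended verbatim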
498 """ Add output module to the process """ 500 if self._options.outputDefinition:
501 if self._options.datatier:
502 print "--datatier & --eventcontent options ignored" 505 outList = eval(self._options.outputDefinition)
506 for (id,outDefDict)
in enumerate(outList):
507 outDefDictStr=outDefDict.__str__()
508 if not isinstance(outDefDict,dict):
509 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
511 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
514 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
515 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
516 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
517 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
518 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
520 if not theModuleLabel:
521 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 525 for name
in tryNames:
526 if not hasattr(self.process,name):
529 if not theModuleLabel:
530 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
532 defaultFileName=self._options.outfile_name
534 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
536 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
537 if not theFileName.endswith(
'.root'):
540 if len(outDefDict.keys()):
541 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
542 if theStreamType==
'DQMIO': theStreamType=
'DQM' 543 if theStreamType==
'ALL':
544 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
546 theEventContent = getattr(self.process, theStreamType+
"EventContent")
550 if theStreamType==
'ALCARECO' and not theFilterName:
551 theFilterName=
'StreamALCACombined' 554 CppType=
'PoolOutputModule' 555 if self._options.timeoutOutput:
556 CppType=
'TimeoutPoolOutputModule' 557 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 558 output = cms.OutputModule(CppType,
559 theEventContent.clone(),
560 fileName = cms.untracked.string(theFileName),
561 dataset = cms.untracked.PSet(
562 dataTier = cms.untracked.string(theTier),
563 filterName = cms.untracked.string(theFilterName))
565 if not theSelectEvent
and hasattr(self.process,
'generation_step')
and theStreamType!=
'LHE':
566 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
567 if not theSelectEvent
and hasattr(self.process,
'filtering_step'):
568 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
570 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
573 if not hasattr(output,
'SelectEvents'):
574 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
575 for alca
in self.AlCaPaths:
576 output.SelectEvents.SelectEvents.extend(getattr(self.process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
579 if hasattr(self.process,theModuleLabel):
580 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
582 setattr(self.process,theModuleLabel,output)
583 outputModule=getattr(self.process,theModuleLabel)
584 setattr(self.process,theModuleLabel+
'_step',cms.EndPath(outputModule))
585 path=getattr(self.process,theModuleLabel+
'_step')
586 self.schedule.append(path)
588 if not self._options.inlineEventContent
and hasattr(self.process,theStreamType+
"EventContent"):
591 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
592 if theExtraOutputCommands:
593 if not isinstance(theExtraOutputCommands,list):
594 raise Exception(
"extra ouput command in --option must be a list of strings")
595 if hasattr(self.process,theStreamType+
"EventContent"):
596 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
598 outputModule.outputCommands.extend(theExtraOutputCommands)
600 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()

        # older convention: --eventcontent and --datatier, comma-separated and matched by position
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is ALCA there is no need for an output
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            # override streamType to eventContent in case of NANOEDM
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                miniAOD_customizeOutput(output)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result

    def addStandardSequences(self):
        """
        Add selected standard sequences to the process
        """
        # load the pile up file
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            # does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # put the mixing parameters in a dictionary
            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # load the pile-up cfg file corresponding to the requested scenario
            if 'file:' in pileupSpec:
                # the file is local
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
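
        # Illustrative sketch (not from the original file): --pileup can carry extra
        # keyword settings after the scenario name, e.g.
        #   --pileup 2016_25ns_Moriond17MC_PoissonOOTPU,{'B':(-3,3)}
        # (scenario name hypothetical); everything after the first comma is eval'ed
        # and merged into mixingDict before defineMixing() turns it into commands.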

        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    self.loadAndRemember(self.SimGeometryCFF)
                    if self.geometryDBLabel:
                        self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
        except ImportError:
            print "Geometry option",self._options.geometry,"unknown."
            raise

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                # add the corresponding input content for a re-run step
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            # it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):
                # keep only the first 'drop *' statement in the list
                count=0
                for item in iec:
                    if item=='drop *':
                        if count!=0:
                            iec.remove(item)
                        count+=1

            # allow comma separated input event content
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
806 """Add conditions to the process""" 807 if not self._options.conditions:
return 809 if 'FrontierConditions_GlobalTag' in self._options.conditions:
810 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 811 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
813 self.loadAndRemember(self.ConditionsDefaultCFF)
815 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
816 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
817 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
821 """Include the customise code """ 825 for c
in self._options.customisation_file:
826 custOpt.extend(c.split(
","))
828 for c
in self._options.customisation_file_unsch:
829 custOpt.extend(c.split(
","))
835 raise Exception(
"more than . in the specification:"+opt)
836 fileName=opt.split(
'.')[0]
837 if opt.count(
'.')==0: rest=
'customise' 839 rest=opt.split(
'.')[1]
840 if rest==
'py': rest=
'customise' 842 if fileName
in custMap:
843 custMap[fileName].extend(rest.split(
'+'))
845 custMap[fileName]=rest.split(
'+')
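
        # Illustrative sketch (not from the original file): a command line such as
        #   --customise SLHCUpgradeSimulations/Configuration/myCustoms.customise1+customise2
        # (module and function names hypothetical) ends up in custMap as
        #   {'SLHCUpgradeSimulations/Configuration/myCustoms': ['customise1','customise2']}
        # and each listed function is imported and applied to the process below.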

        final_snippet='\n# customisation of the process.\n'

        # check first that no customisation function is requested twice
        allFcn=[]
        for opt in custMap:
            allFcn.extend(custMap[opt])
        for fcn in allFcn:
            if allFcn.count(fcn)!=1:
                raise Exception("cannot specify twice "+fcn+" as a customisation method")

        for f in custMap:
            # let python search for that package and do syntax checking at the same time
            packageName = f.replace(".py","").replace("/",".")
            __import__(packageName)
            package = sys.modules[packageName]

            # ask the package for its definition, and pick .py instead of .pyc
            customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

            final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
            if self._options.inline_custom:
                for line in file(customiseFile,'r'):
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
            else:
                final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
            for fcn in custMap[f]:
                print "customising the process with",fcn,"from",f
                if not hasattr(package,fcn):
                    raise Exception("config "+f+" has no function "+fcn)
                # apply the customisation and record it in the dumped configuration
                self.process=getattr(package,fcn)(self.process)
                final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
                final_snippet += "\nprocess = %s(process)\n"%(fcn,)

        final_snippet += '\n# End of customisation functions\n'

        return final_snippet

    def addCustomiseCmdLine(self):
        final_snippet='\n# Customisation from command line\n'
        if self._options.customise_commands:
            for com in self._options.customise_commands.split('\\n'):
                com=com.lstrip()
                self.executeAndRemember(com)
                final_snippet += '\n'+com
        return final_snippet

    def define_Configs(self):
        if len(self.stepMap):
            self.loadAndRemember('Configuration/StandardSequences/Services_cff')
        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF="Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF="Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF="Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
                self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
            self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"

        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.RECOSIMDefaultSeq='recosim'
        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'
        self.PATGENDefaultSeq='miniGEN'
        self.NANODefaultSeq='nanoSequence'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # for MC, switch to the MC flavours of the default cffs
        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF="Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq='nanoSequenceMC'
        else:
            self._options.beamspot = None

        # if requested, replace the default generation sequence with the one fixing gen info
        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq='HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        # the magnetic field
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF='Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF=self.magFieldCFF.replace("__",'_')

        # the geometry
        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None
        simGeometry=''
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                # the second entry may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # the special GFlash geometry and simulation
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cffs and sequences
        if self._options.fast:
            self.SIMDefaultCFF='FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF='FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF='FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq='reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        # mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        # pile-up is not driven by a default cff for data
        if self._options.isData:
            self._options.pileup=None

        self.REDIGIDefaultSeq=self.DIGIDefaultSeq

    def addExtraStream(self, name, stream, workflow='full'):
        # define the output module and go from there
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                    return label
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName = cms.untracked.string(self._options.filtername+"_"+stream.name)

        # add automatic flushing to limit memory consumption
        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        # in case of relvals we do not want additional outputs
        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            # adjust the select events to the proper trigger results from the previous process
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

        return output

    def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF):
        if ( len(sequence.split('.'))==1 ):
            l=self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            l=self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"
            raise
        return l

    def scheduleSequence(self, seq, prefix, what='Path'):
        if '*' in seq:
            # create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    p=getattr(self.process,prefix)
                    p+=getattr(self.process, s)
            self.schedule.append(getattr(self.process,prefix))
        else:
            # create as many paths as there are sequences
            if not '+' in seq:
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            else:
                for i,s in enumerate(seq.split('+')):
                    sn=prefix+'%d'%(i)
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
        return
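
    # Illustrative sketch (not from the original file): scheduleSequence() turns a
    # specification such as "siPixelDigis+siStripDigis" (names hypothetical) into one
    # cms.Path per '+'-separated sequence, while "a*b" products are folded into a
    # single Path; each created Path is appended to self.schedule.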
1246 """ Enrich the process with alca streams """ 1248 sequence = sequence.split(
'.')[-1]
1251 alcaList = sequence.split(
"+")
1253 from Configuration.AlCa.autoAlca
import autoAlca
1257 for name
in alcaConfig.__dict__:
1258 alcastream = getattr(alcaConfig,name)
1259 shortName = name.replace(
'ALCARECOStream',
'')
1260 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1261 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1262 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1263 self.AlCaPaths.append(shortName)
1264 if 'DQM' in alcaList:
1265 if not self._options.inlineEventContent
and hasattr(self.process,name):
1266 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1268 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1271 if self._options.hltProcess
or 'HLT' in self.stepMap:
1272 if isinstance(alcastream.paths,tuple):
1273 for path
in alcastream.paths:
1278 for i
in range(alcaList.count(shortName)):
1279 alcaList.remove(shortName)
1282 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1283 path = getattr(alcaConfig,name)
1284 self.schedule.append(path)
1285 alcaList.remove(
'DQM')
1287 if isinstance(alcastream,cms.Path):
1289 self.blacklist_paths.append(alcastream)
1292 if len(alcaList) != 0:
1294 for name
in alcaConfig.__dict__:
1295 alcastream = getattr(alcaConfig,name)
1296 if isinstance(alcastream,cms.FilteredStream):
1297 available.append(name.replace(
'ALCARECOStream',
''))
1298 print "The following alcas could not be found "+
str(alcaList)
1299 print "available ",available
1301 raise Exception(
"The following alcas could not be found "+
str(alcaList))

    def prepare_LHE(self, sequence = None):
        # load the fragment of the external LHE producer
        loadFragment = self._options.evt_type.replace('.py','').replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        # schedule it in a dedicated path, excluded from the dumped schedule
        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1321 """ load the fragment of generator configuration """ 1326 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1328 if not '/' in loadFragment:
1329 loadFragment=
'Configuration.Generator.'+loadFragment
1331 loadFragment=loadFragment.replace(
'/',
'.')
1333 print "Loading generator fragment from",loadFragment
1334 __import__(loadFragment)
1338 if not (self._options.filein
or self._options.dasquery):
1339 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1342 generatorModule=sys.modules[loadFragment]
1343 genModules=generatorModule.__dict__
1346 if self.LHEDefaultSeq
in genModules:
1347 del genModules[self.LHEDefaultSeq]
1349 if self._options.hideGen:
1350 self.loadAndRemember(loadFragment)
1352 self.process.load(loadFragment)
1354 import FWCore.ParameterSet.Modules
as cmstypes
1355 for name
in genModules:
1356 theObject = getattr(generatorModule,name)
1357 if isinstance(theObject, cmstypes._Module):
1358 self._options.inlineObjets=name+
','+self._options.inlineObjets
1359 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1360 self._options.inlineObjets+=
','+name
1362 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1363 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1364 self.productionFilterSequence =
'ProductionFilterSequence' 1365 elif 'generator' in genModules:
1366 self.productionFilterSequence =
'generator' 1368 """ Enrich the schedule with the rest of the generation step """ 1369 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1370 genSeqName=sequence.split(
'.')[-1]
1374 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1375 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1376 self.loadAndRemember(cffToBeLoaded)
1378 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1380 if self._options.scenario ==
'HeavyIons':
1381 if self._options.pileup==
'HiMixGEN':
1382 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1384 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1386 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1387 self.schedule.append(self.process.generation_step)
1390 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1392 if 'reGEN' in self.stepMap:
1396 """ Enrich the schedule with the summary of the filter step """ 1398 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1399 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1403 """ Enrich the schedule with the simulation step""" 1404 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1405 if not self._options.fast:
1406 if self._options.gflash==
True:
1407 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1409 if self._options.magField==
'0T':
1410 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1412 if self._options.magField==
'0T':
1413 self.executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1415 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1419 """ Enrich the schedule with the digitisation step""" 1422 if self._options.gflash==
True:
1423 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1425 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1426 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1428 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1429 if self._options.inputEventContent==
'':
1430 self._options.inputEventContent=
'REGEN' 1432 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1439 """ Enrich the schedule with the digitisation step""" 1444 if sequence ==
'pdigi_valid':
1445 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1447 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1453 """ Enrich the schedule with the digitisation step""" 1454 self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1456 self.loadAndRemember(
"SimGeneral/MixingModule/digi_MixPreMix_cfi")
1459 if sequence ==
'pdigi_valid':
1460 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1462 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1464 self.scheduleSequence(sequence.split(
'.')[-1],
'digitisation_step')
1468 """ Enrich the schedule with the crossing frame writer step""" 1474 """ Enrich the schedule with the digitisation step""" 1478 if self._options.pileup_input:
1480 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1481 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1482 elif self._options.pileup_input.startswith(
"filelist:"):
1483 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1485 theFiles=self._options.pileup_input.split(
',')
1487 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1494 if "DIGIPREMIX" in self.stepMap.keys():
1496 self.
executeAndRemember(
"process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
1506 """ Enrich the schedule with the L1 simulation step""" 1507 assert(sequence ==
None)
1513 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1514 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT']
1515 if sequence
in supported:
1516 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1517 if self._options.scenario ==
'HeavyIons':
1518 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1519 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1521 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1526 """ Enrich the schedule with the HLT simulation step""" 1528 print "no specification of the hlt menu has been given, should never happen" 1529 raise Exception(
'no HLT sequence provided')
1533 from Configuration.HLT.autoHLT
import autoHLT
1536 sequence = autoHLT[key]
1538 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1544 if self._options.scenario ==
'HeavyIons':
1545 optionsForHLT[
'type'] =
'HIon' 1547 optionsForHLT[
'type'] =
'GRun' 1548 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1549 if sequence ==
'run,fromSource':
1550 if hasattr(self.process.source,
'firstRun'):
1551 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1552 elif hasattr(self.process.source,
'setRunNumber'):
1553 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1555 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1557 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1561 if self._options.isMC:
1562 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1564 if self._options.name !=
'HLT':
1565 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1566 self.additionalCommands.append(
'process = ProcessName(process)')
1567 self.additionalCommands.append(
'')
1568 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1571 self.schedule.append(self.process.HLTSchedule)
1572 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1575 if self._options.fast:
1576 if not hasattr(self.process,
'HLTEndSequence'):
1577 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")

    def prepare_RAW2RECO(self, sequence = None):
        if ',' in sequence:
            seqReco=sequence.split(',')[1]
            seqDigi=sequence.split(',')[0]
        else:
            print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.prepare_RAW2DIGI(seqDigi)
        self.prepare_RECO(seqReco)
        return

    def prepare_PATFILTER(self, sequence = None):
        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))

    def prepare_L1HwVal(self, sequence = 'L1HwVal'):
        ''' Enrich the schedule with L1 HW validation '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.L1HwValDefaultCFF)
        print '\n\n\n DEPRECATED this has no action \n\n\n'
        return

    def prepare_L1Reco(self, sequence = "L1Reco"):
        ''' Enrich the schedule with L1 reconstruction '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.L1RecoDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'L1Reco_step')
        return

    def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
        ''' Enrich the schedule with L1 reconstruction '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.L1TrackTriggerDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'L1TrackTrigger_step')
        return

    def prepare_FILTER(self, sequence = None):
        ''' Enrich the schedule with a user defined filter sequence '''
        # load the relevant part
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]
        # collect the module labels so they can be inlined in the dumped configuration
        class PrintAllModules(object):
            def __init__(self):
                self.inliner=''
            def enter(self,visitee):
                try:
                    label=visitee.label()
                    # needs to be in reverse order
                    self.inliner=label+','+self.inliner
                except:
                    pass
            def leave(self,v):
                pass

        expander=PrintAllModules()
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        # put the filtering path in the schedule, before all the other paths
        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True
        self.productionFilterSequence = filterSeq
        return

    def prepare_RECO(self, sequence = "reconstruction"):
        ''' Enrich the schedule with reconstruction '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.RECODefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_step')
        return

    def prepare_RECOSIM(self, sequence = "recosim"):
        ''' Enrich the schedule with reconstruction '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.RECOSIMDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'recosim_step')
        return

    def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
            sys.exit()
        self.loadDefaultOrSpecifiedCFF(self.RECOBEFMIXDefaultSeq,self.RECOBEFMIXDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
        return

    def prepare_PAT(self, sequence = "miniAOD"):
        ''' Enrich the schedule with PAT '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF)
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
        return

    def prepare_PATGEN(self, sequence = "miniGEN"):
        ''' Enrich the schedule with PATGEN '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.PATGENDefaultCFF)
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")
        return

    def prepare_NANO(self, sequence = "nanoAOD"):
        ''' Enrich the schedule with NANO '''
        self.loadDefaultOrSpecifiedCFF(sequence,self.NANODefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'nanoAOD_step')
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        else:
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

    def prepare_EI(self, sequence = None):
        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
        else:
            raise Exception('Cannot set %s event interpretation'%( sequence) )
        self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
        return
def prepare_SKIM(self, sequence="all"):
    ''' Enrich the schedule with skimming fragments'''
    skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
    sequence = sequence.split('.')[-1]

    skimlist=sequence.split('+')
    # expand '@'-style aliases via the autoSkim mapping
    from Configuration.Skimming.autoSkim import autoSkim
    self.expandMapping(skimlist,autoSkim)

    for skim in skimConfig.__dict__:
        skimstream = getattr(skimConfig,skim)
        if isinstance(skimstream,cms.Path):
            # blacklist the skim path so that it does not get dumped twice in the cfg
            self.blacklist_paths.append(skimstream)
        if (not isinstance(skimstream,cms.FilteredStream)):
            continue
        shortname = skim.replace('SKIMStream','')
        if (sequence=="all"):
            self.addExtraStream(skim,skimstream)
        elif (shortname in skimlist):
            self.addExtraStream(skim,skimstream)
            # add a DQM event content for this stream
            if self._options.datatier=='DQM':
                self.process.load(self.EVTCONTDefaultCFF)
                skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                        )
            for i in range(skimlist.count(shortname)):
                skimlist.remove(shortname)

    if (skimlist.__len__()!=0 and sequence!="all"):
        print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
        raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
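# Illustration only: how a SKIM spec such as "ZMu+LogError" is matched against the
# stream names defined in the skim configuration (stream names here are placeholders).
skim_spec = "ZMu+LogError"
skimlist = skim_spec.split('+')
available = ['SKIMStreamZMu', 'SKIMStreamLogError', 'SKIMStreamHighMET']
for skim in available:
    shortname = skim.replace('SKIMStream', '')
    if shortname in skimlist:
        print "scheduling skim stream %s (short name %s)" % (skim, shortname)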
def prepare_USER(self, sequence=None):
    ''' Enrich the schedule with a user defined sequence '''

def prepare_POSTRECO(self, sequence=None):
    """ Enrich the schedule with the postreco step """

def prepare_VALIDATION(self, sequence='validation'):
    print sequence,"in preparing validation"
    from Validation.Configuration.autoValidation import autoValidation
    # in case of VALIDATION:something.somethingelse, keep only the last part
    sequence=sequence.split('.')[-1]
    if sequence.find(',')!=-1:
        prevalSeqName=sequence.split(',')[0].split('+')
        valSeqName=sequence.split(',')[1].split('+')
    elif '@' in sequence:
        prevalSeqName=sequence.split('+')
        valSeqName=sequence.split('+')
    else:
        postfix='_'+sequence
        prevalSeqName=['prevalidation'+postfix]
        valSeqName=['validation'+postfix]
        if not hasattr(self.process,valSeqName[0]):
            valSeqName=[sequence]

    # rename the HLT process in the validation sequences if needed
    if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
        for s in valSeqName+prevalSeqName:
            self.renameHLTprocessInSequence(s)

    for (i,s) in enumerate(prevalSeqName):
        setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
        self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

    for (i,s) in enumerate(valSeqName):
        setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
        self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

    if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:

    if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
        # flip only the untouched default (False); an explicit user choice is kept
        if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
            self._options.restoreRNDSeeds=True

    if not 'DIGI' in self.stepMap and not self._options.fast:
        self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

    if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
        # prepend the gen-step filter to each validation path
        for (i,s) in enumerate(valSeqName):
            getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
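# Illustrative parsing of the VALIDATION sequence argument (sequence names are made up):
# a comma separates the prevalidation part from the validation part, '+' separates sequences.
sequence = "prevalidationMiniAOD+prevalidationOther,validationMiniAOD+validationOther"
sequence = sequence.split('.')[-1]
if sequence.find(',') != -1:
    prevalSeqName = sequence.split(',')[0].split('+')
    valSeqName    = sequence.split(',')[1].split('+')
print prevalSeqName   # ['prevalidationMiniAOD', 'prevalidationOther']
print valSeqName      # ['validationMiniAOD', 'validationOther']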
1867 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1868 It will climb down within PSets, VPSets and VInputTags to find its target""" 1869 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1876 if isinstance(pset, cms._Parameterizable):
1877 for name
in pset.parameters_().
keys():
1883 value = getattr(pset,name)
1884 type = value.pythonTypeName()
1885 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1886 self.
doIt(value,base+
"."+name)
1887 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1888 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1889 elif type
in (
'cms.string',
'cms.untracked.string'):
1891 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1893 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1894 for (i,n)
in enumerate(value):
1895 if not isinstance(n, cms.InputTag):
1899 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1902 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1903 for (i,n)
in enumerate(value):
1906 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1908 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1909 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1914 label = visitee.label()
1915 except AttributeError:
1916 label =
'<Module not in a Process>' 1918 label =
'other execption' 1919 self.
doIt(visitee, label)
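# Usage sketch of the visitor, modelled on the command that renameHLTprocessInSequence
# (below) writes into the configuration. It assumes an existing cms.Process called
# `process` with a sequence named `validation`; 'reHLT' is a placeholder process name.
from Configuration.Applications.ConfigBuilder import ConfigBuilder
process.validation.visit(
    ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT",
                                                      whitelist = ("subSystemFolder",)))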
1926 print "Replacing all InputTag %s => %s"%(oldT,newT)
1929 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1930 if not loadMe
in self.additionalCommands:
1931 self.additionalCommands.append(loadMe)
1932 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1936 if self._options.hltProcess:
1937 proc=self._options.hltProcess
1939 proc=self.process.name_()
1940 if proc==HLTprocess:
return 1942 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1944 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1945 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1946 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
def expandMapping(self, seqList, mapping, index=None):
    maxLevel=30
    level=0
    while '@' in repr(seqList) and level<maxLevel:
        level+=1
        for specifiedCommand in seqList:
            if specifiedCommand.startswith('@'):
                location=specifiedCommand[1:]
                if not location in mapping:
                    raise Exception("Impossible to map "+location+" from "+repr(mapping))
                mappedTo=mapping[location]
                if index is not None:
                    mappedTo=mappedTo[index]
                seqList.remove(specifiedCommand)
                seqList.extend(mappedTo.split('+'))
                break
    if level==maxLevel:
        raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
def prepare_DQM(self, sequence='DQMOffline'):
    # the first list drives the offline DQM paths, the second the DQM-on-PAT paths
    sequenceList=sequence.split('.')[-1].split('+')
    postSequenceList=sequence.split('.')[-1].split('+')
    from DQMOffline.Configuration.autoDQM import autoDQM

    if len(set(sequenceList))!=len(sequenceList):
        sequenceList=list(set(sequenceList))
        print "Duplicate entries for DQM, using",sequenceList

    pathName='dqmoffline_step'
    for (i,sequence) in enumerate(sequenceList):
        if (i!=0):
            pathName='dqmoffline_%d_step'%(i)

        if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
            self.renameHLTprocessInSequence(sequence)

        setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
        self.schedule.append(getattr(self.process,pathName))

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            getattr(self.process,pathName).insert(0,self.process.genstepfilter)

    pathName='dqmofflineOnPAT_step'
    for (i,sequence) in enumerate(postSequenceList):
        if (i!=0):
            pathName='dqmofflineOnPAT_%d_step'%(i)

        setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
        self.schedule.append(getattr(self.process,pathName))
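# Naming of the scheduled DQM EndPaths for a multi-sequence spec (sequence names below
# are illustrative): the first entry keeps the bare path name, later ones get an index.
sequenceList = ['DQMOfflineCommon', 'DQMOfflineMuon', 'DQMOfflineJetMET']
pathName = 'dqmoffline_step'
for (i, seq) in enumerate(sequenceList):
    if i != 0:
        pathName = 'dqmoffline_%d_step' % (i)
    print pathName, '->', seq
# dqmoffline_step -> DQMOfflineCommon
# dqmoffline_1_step -> DQMOfflineMuon
# dqmoffline_2_step -> DQMOfflineJetMET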
2006 """ Enrich the process with harvesting step """ 2007 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 2011 sequence = sequence.split(
'.')[-1]
2014 harvestingList = sequence.split(
"+")
2015 from DQMOffline.Configuration.autoDQM
import autoDQM
2016 from Validation.Configuration.autoValidation
import autoValidation
2018 combined_mapping = copy.deepcopy( autoDQM )
2019 combined_mapping.update( autoValidation )
2020 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2022 if len(set(harvestingList))!=len(harvestingList):
2023 harvestingList=
list(set(harvestingList))
2024 print "Duplicate entries for HARVESTING, using",harvestingList
2026 for name
in harvestingList:
2027 if not name
in harvestingConfig.__dict__:
2028 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
2030 harvestingstream = getattr(harvestingConfig,name)
2031 if isinstance(harvestingstream,cms.Path):
2032 self.schedule.append(harvestingstream)
2033 self.blacklist_paths.append(harvestingstream)
2034 if isinstance(harvestingstream,cms.Sequence):
2035 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
2036 self.schedule.append(getattr(self.process,name+
"_step"))
2042 """ Enrich the process with AlCaHarvesting step """ 2044 sequence=sequence.split(
".")[-1]
2047 harvestingList = sequence.split(
"+")
2051 from Configuration.AlCa.autoPCL
import autoPCL
2054 for name
in harvestingConfig.__dict__:
2055 harvestingstream = getattr(harvestingConfig,name)
2056 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2057 self.schedule.append(harvestingstream)
2058 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2059 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2060 harvestingList.remove(name)
2062 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2063 self.schedule.append(lastStep)
2065 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2066 print "The following harvesting could not be found : ", harvestingList
2067 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
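# For illustration: the two remembered commands generated for a hypothetical PCL
# workflow name 'BeamSpotByRun' (any key of the harvesting config works the same way).
name = 'BeamSpotByRun'
print "process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)"
print "process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)"
# -> process.PoolDBOutputService.toPut.append(process.ALCAHARVESTBeamSpotByRun_dbOutput)
# -> process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVESTBeamSpotByRun_metadata)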
def finalizeFastSimHLT(self):
    self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
    self.schedule.append(self.process.reconstruction)

def build_production_info(self, evt_type, evtnumber):
    """ Add useful info for the production. """
    self.process.configurationMetadata=cms.untracked.PSet\
        (version=cms.untracked.string("$Revision: 1.19 $"),
         name=cms.untracked.string("Applications"),
         annotation=cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
         )
    self.addedObjects.append(("Production Info","configurationMetadata"))
2093 """ Prepare the configuration string and add missing pieces.""" 2105 outputModuleCfgCode=
"" 2106 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2111 self.pythonCfgCode =
"# Auto generated configuration file\n" 2112 self.pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2113 self.pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2114 self.pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2115 if hasattr(self.
_options,
"era")
and self._options.era :
2116 self.pythonCfgCode +=
"from Configuration.StandardSequences.Eras import eras\n\n" 2117 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"'" 2119 for requestedEra
in self._options.era.split(
",") :
2120 self.pythonCfgCode +=
",eras."+requestedEra
2121 self.pythonCfgCode +=
")\n\n" 2123 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"')\n\n" 2125 self.pythonCfgCode +=
"# import of standard configurations\n" 2126 for module
in self.imports:
2127 self.pythonCfgCode += (
"process.load('"+module+
"')\n")
2130 if not hasattr(self.process,
"configurationMetadata"):
2134 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2136 self.pythonCfgCode +=
"\n" 2137 for comment,object
in self.addedObjects:
2139 self.pythonCfgCode +=
"\n# "+comment+
"\n" 2140 self.pythonCfgCode +=
dumpPython(self.process,object)
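# A compressed illustration of the header this builds up (the command line options and
# the process name/era are made up; __version__ and __source__ are the module constants).
pythonCfgCode  = "# Auto generated configuration file\n"
pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
pythonCfgCode += "# with command line options: --step RAW2DIGI,RECO --era Run2_2017\n"
pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
pythonCfgCode += "from Configuration.StandardSequences.Eras import eras\n\n"
pythonCfgCode += "process = cms.Process('RECO'" + ",eras.Run2_2017" + ")\n\n"
print pythonCfgCode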
    # dump the output definition
    self.pythonCfgCode += "\n# Output definition\n"
    self.pythonCfgCode += outputModuleCfgCode

    # dump all additional outputs (e.g. alca or skim streams)
    self.pythonCfgCode += "\n# Additional output definition\n"
    nl=self.additionalOutputs.keys()
    nl.sort()
    for name in nl:
        output = self.additionalOutputs[name]
        self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
        tmpOut = cms.EndPath(output)
        setattr(self.process,name+'OutPath',tmpOut)
        self.schedule.append(tmpOut)

    # dump all additional commands
    self.pythonCfgCode += "\n# Other statements\n"
    for command in self.additionalCommands:
        self.pythonCfgCode += command + "\n"

    # comma-separated list of objects that should be inlined in the configuration
    for object in self._options.inlineObjets.split(','):
        if not hasattr(self.process,object):
            print 'cannot inline -'+object+'- : not known'
        else:
            self.pythonCfgCode += '\n'
            self.pythonCfgCode += dumpPython(self.process,object)

    # dump all paths and endpaths that are not blacklisted
    self.pythonCfgCode += "\n# Path and EndPath definitions\n"
    for path in self.process.paths:
        if getattr(self.process,path) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process,path)

    for endpath in self.process.endpaths:
        if getattr(self.process,endpath) not in self.blacklist_paths:
            self.pythonCfgCode += dumpPython(self.process,endpath)
    # schedule definition
    self.pythonCfgCode += "\n# Schedule definition\n"
    result = "process.schedule = cms.Schedule("

    # build the in-memory schedule as well
    self.process.schedule = cms.Schedule()
    for item in self.schedule:
        if not isinstance(item, cms.Schedule):
            self.process.schedule.append(item)
        else:
            self.process.schedule.extend(item)

    if hasattr(self.process,"HLTSchedule"):
        # the HLT schedule is already a cms.Schedule: dump the paths before it,
        # extend with the HLT schedule, then extend with the paths after it
        beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
        afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
        pathNames = ['process.'+p.label_() for p in beforeHLT]
        result += ','.join(pathNames)+')\n'
        result += 'process.schedule.extend(process.HLTSchedule)\n'
        pathNames = ['process.'+p.label_() for p in afterHLT]
        result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
    else:
        pathNames = ['process.'+p.label_() for p in self.schedule]
        result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'

    self.pythonCfgCode += result
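# Toy rendering of the dumped schedule line (the path labels below are made up).
pathNames = ['process.raw2digi_step', 'process.reconstruction_step', 'process.endjob_step', 'process.RECOoutput_step']
result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
print result
# -> process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step,process.endjob_step,process.RECOoutput_step)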
    # associate extra Tasks (e.g. patTask) to the schedule, both in memory and in the dump
    for labelToAssociate in self.labelsToAssociate:
        self.process.schedule.associate(getattr(self.process, labelToAssociate))
        self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

    self.pythonCfgCode+="from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
    self.pythonCfgCode+="associatePatAlgosToolsTask(process)\n"

    if self._options.nThreads != "1":
        self.pythonCfgCode +="\n"
        self.pythonCfgCode +="#Setup FWK for multithreaded\n"
        self.pythonCfgCode +="process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
        self.pythonCfgCode +="process.options.numberOfStreams=cms.untracked.uint32(0)\n"

    # for repacked data, rename the raw data input tags both in the dump and in memory
    if self._options.isRepacked:
        self.pythonCfgCode +="\n"
        self.pythonCfgCode +="from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
        self.pythonCfgCode +="MassReplaceInputTag(process)\n"
        MassReplaceInputTag(self.process)
    if self.productionFilterSequence:
        self.pythonCfgCode += '# filter all path with the production filter sequence\n'
        self.pythonCfgCode += 'for path in process.paths:\n'
        if len(self.conditionalPaths):
            self.pythonCfgCode += '\tif not path in %s: continue\n'%str(self.conditionalPaths)
        if len(self.excludedPaths):
            self.pythonCfgCode += '\tif path in %s: continue\n'%str(self.excludedPaths)
        self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
        pfs = getattr(self.process,self.productionFilterSequence)
        for path in self.process.paths:
            if not path in self.conditionalPaths: continue
            if path in self.excludedPaths: continue
            getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
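# Toy rendering of the emitted filter snippet (sequence and path names are made up),
# built from the same format strings used above.
productionFilterSequence = 'ProductionFilterSequence'
conditionalPaths = ['generation_step', 'simulation_step']
code  = '# filter all path with the production filter sequence\n'
code += 'for path in process.paths:\n'
code += '\tif not path in %s: continue\n' % str(conditionalPaths)
code += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n' % (productionFilterSequence,)
print code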
    if self._options.runUnscheduled:
        # convert the process to unscheduled mode, in the dump and in memory
        self.pythonCfgCode+="#do not add changes to your config after this point (unless you know what you are doing)\n"
        self.pythonCfgCode+="from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
        self.pythonCfgCode+="process=convertToUnscheduled(process)\n"
        from FWCore.ParameterSet.Utilities import convertToUnscheduled
        self.process=convertToUnscheduled(self.process)

    if hasattr(self.process,"logErrorHarvester"):
        # make the logErrorHarvester wait for the same EDProducers as the OutputModules
        self.pythonCfgCode +="\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
        self.pythonCfgCode +="from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
        self.pythonCfgCode +="process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
        from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
        self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)

    # add early deletion of temporary data products to reduce peak memory need
    self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
    self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
    self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
    self.pythonCfgCode += "# End adding early deletion\n"
    from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
    self.process = customiseEarlyDelete(self.process)
    # dump a machine-readable summary of the job I/O if requested via --io
    if self._options.io:
        if not self._options.io.endswith('.io'): self._options.io+='.io'
        io=open(self._options.io,'w')
        ioJson={}
        if hasattr(self.process.source,"fileNames"):
            if len(self.process.source.fileNames.value()):
                ioJson['primary']=self.process.source.fileNames.value()
        if hasattr(self.process.source,"secondaryFileNames"):
            if len(self.process.source.secondaryFileNames.value()):
                ioJson['secondary']=self.process.source.secondaryFileNames.value()
        if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
            ioJson['pileup']=self._options.pileup_input[4:]
        for (o,om) in self.process.outputModules_().items():
            ioJson[o]=om.fileName.value()
        ioJson['GT']=self.process.GlobalTag.globaltag.value()
        if self.productionFilterSequence:
            ioJson['filter']=self.productionFilterSequence
        io.write(json.dumps(ioJson))
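# Example of the kind of .io summary this writes (all values below are made up).
import json
ioJson = {
    'primary':    ['/store/data/Run2017X/SingleMuon/RAW/v1/000/123/456/file.root'],
    'pileup':     '/MinBias/FakeCampaign-v1/GEN-SIM',
    'RECOoutput': 'output.root',
    'GT':         'auto:run2_data',
    'filter':     'ProductionFilterSequence',
}
print json.dumps(ioJson)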