__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'

    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    return "process."+name+" = " + theObject.dumpPython()+"\n"

    import FWCore.ParameterSet.Config as cms
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries = line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry = line.replace("\n","")
            if not entry in prim:
        if not hasattr(s,"fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
            s.fileNames.extend(prim)
        if not hasattr(s,"secondaryFileNames"):
            s.secondaryFileNames = cms.untracked.vstring(sec)
            s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    raise Exception("There are no files in input from the file list")
    print "found parent files:",sec
    import FWCore.ParameterSet.Config as cms
    print "the query is",query
    while eC!=0 and count<3:
            print 'Sleeping, then retrying DAS'
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        tupleP = os.waitpid(p.pid, 0)
        print "DAS succeeded after",count,"attempts",eC
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries = line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry = line.replace("\n","")
            if not entry in prim:
        if not hasattr(s,"fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
            s.fileNames.extend(prim)
        if not hasattr(s,"secondaryFileNames"):
            s.secondaryFileNames = cms.untracked.vstring(sec)
            s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    print "found parent files:",sec
def anyOf(listOfKeys,dict,opt=None):
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
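        # Illustrative sketch (not part of the original source): a cmsDriver option such as
        #   --step RAW2DIGI,RECO:localreco+globalreco,DQM
        # would be parsed by the loop above into
        #   self.stepMap == {'RAW2DIGI': '', 'RECO': ['localreco','globalreco'], 'DQM': ''}
        # (the sequence names are only examples of the step:seq1+seq2 syntax).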
        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input

        self.schedule = list()

        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.labelsToAssociate = []
        self.nextScheduleIsConditional = False
        self.conditionalPaths = []
        self.excludedPaths = []
        """Function to add the igprof profile service so that you can dump in the middle"""

        profileOpts = self._options.profile.split(':')
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)

            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)

            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]
324 """helper routine to remember replace statements""" 325 self.additionalCommands.append(command)
326 if not command.strip().startswith(
"#"):
329 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered")
                                                     )
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

        """Here we decide how many evts will be processed"""
        self.process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
369 """Here the source is built. Priority: file, generator""" 370 self.addedObjects.append((
"Input source",
"source"))
373 for entry
in self._options.filein.split(
','):
375 if entry.startswith(
"filelist:"):
377 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
378 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
380 self.process.source.fileNames.append(self._options.dirin+entry)
381 if self._options.secondfilein:
382 if not hasattr(self.process.source,
"secondaryFileNames"):
383 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
384 for entry
in self._options.secondfilein.split(
','):
386 if entry.startswith(
"filelist:"):
387 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
388 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
389 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
391 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
393 if self._options.filein
or self._options.dasquery:
394 if self._options.filetype ==
"EDM":
395 self.process.source=cms.Source(
"PoolSource",
396 fileNames = cms.untracked.vstring(),
397 secondaryFileNames= cms.untracked.vstring())
399 elif self._options.filetype ==
"DAT":
400 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
402 elif self._options.filetype ==
"LHE":
403 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
404 if self._options.filein.startswith(
"lhe:"):
406 args=self._options.filein.split(
':')
408 print 'LHE input from article ',article
409 location=
'/store/lhe/' 411 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
412 for line
in textOfFiles:
413 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
414 self.process.source.fileNames.append(location+article+
'/'+fileName)
417 print 'Issue to load LHE files, please check and try again.' 420 if len(self.process.source.fileNames)==0:
421 print 'Issue with empty filename, but can pass line check' 424 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
428 elif self._options.filetype ==
"DQM":
429 self.process.source=cms.Source(
"DQMRootSource",
430 fileNames = cms.untracked.vstring())
433 elif self._options.filetype ==
"DQMDAQ":
435 self.process.source=cms.Source(
"DQMStreamerReader")
438 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
439 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
441 if self._options.dasquery!=
'':
442 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
443 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
445 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
446 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
449 if 'GEN' in self.stepMap.keys():
450 if self._options.inputCommands:
451 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 453 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 455 if self.process.source
and self._options.inputCommands:
456 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
457 for command
in self._options.inputCommands.split(
','):
459 command = command.strip()
460 if command==
'':
continue 461 self.process.source.inputCommands.append(command)
462 if not self._options.dropDescendant:
463 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
465 if self._options.lumiToProcess:
466 import FWCore.PythonUtilities.LumiList
as LumiList
467 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
469 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.stepMap
or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
470 if self.process.source
is None:
471 self.process.source=cms.Source(
"EmptySource")
474 self.runsAndWeights=
None 475 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
476 if not self._options.isMC :
477 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
478 if self._options.runsAndWeightsForMC:
479 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
481 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
482 if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
483 __import__(RunsAndWeights[self._options.runsScenarioForMC])
484 self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
486 self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
488 if self.runsAndWeights:
489 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
491 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
492 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
497 """ Add output module to the process """ 499 if self._options.outputDefinition:
500 if self._options.datatier:
501 print "--datatier & --eventcontent options ignored" 504 outList = eval(self._options.outputDefinition)
505 for (id,outDefDict)
in enumerate(outList):
506 outDefDictStr=outDefDict.__str__()
507 if not isinstance(outDefDict,dict):
508 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
510 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
513 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
514 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
515 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
516 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
517 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
519 if not theModuleLabel:
520 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
521 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 524 for name
in tryNames:
525 if not hasattr(self.process,name):
528 if not theModuleLabel:
529 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
531 defaultFileName=self._options.outfile_name
533 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
535 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
536 if not theFileName.endswith(
'.root'):
539 if len(outDefDict.keys()):
540 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
541 if theStreamType==
'DQMIO': theStreamType=
'DQM' 542 if theStreamType==
'ALL':
543 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
545 theEventContent = getattr(self.process, theStreamType+
"EventContent")
549 if theStreamType==
'ALCARECO' and not theFilterName:
550 theFilterName=
'StreamALCACombined' 553 CppType=
'PoolOutputModule' 554 if self._options.timeoutOutput:
555 CppType=
'TimeoutPoolOutputModule' 556 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 557 output = cms.OutputModule(CppType,
558 theEventContent.clone(),
559 fileName = cms.untracked.string(theFileName),
560 dataset = cms.untracked.PSet(
561 dataTier = cms.untracked.string(theTier),
562 filterName = cms.untracked.string(theFilterName))
564 if not theSelectEvent
and hasattr(self.process,
'generation_step')
and theStreamType!=
'LHE':
565 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
566 if not theSelectEvent
and hasattr(self.process,
'filtering_step'):
567 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
569 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
572 if not hasattr(output,
'SelectEvents'):
573 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
574 for alca
in self.AlCaPaths:
575 output.SelectEvents.SelectEvents.extend(getattr(self.process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
578 if hasattr(self.process,theModuleLabel):
579 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
581 setattr(self.process,theModuleLabel,output)
582 outputModule=getattr(self.process,theModuleLabel)
583 setattr(self.process,theModuleLabel+
'_step',cms.EndPath(outputModule))
584 path=getattr(self.process,theModuleLabel+
'_step')
585 self.schedule.append(path)
587 if not self._options.inlineEventContent
and hasattr(self.process,theStreamType+
"EventContent"):
590 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
591 if theExtraOutputCommands:
592 if not isinstance(theExtraOutputCommands,list):
593 raise Exception(
"extra ouput command in --option must be a list of strings")
594 if hasattr(self.process,theStreamType+
"EventContent"):
595 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
597 outputModule.outputCommands.extend(theExtraOutputCommands)
599 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
604 streamTypes=self._options.eventcontent.split(
',')
605 tiers=self._options.datatier.split(
',')
606 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
607 raise Exception(
"number of event content arguments does not match number of datatier arguments")
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType = 'DQM'
            eventContent = streamType
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
                theFileName = self._options.outfile_name
                theFilterName = self._options.filtername
                theFileName = self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName = self._options.filtername
            CppType = 'PoolOutputModule'
            if self._options.timeoutOutput:
                CppType = 'TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType = 'DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType = 'NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)
                                      ))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

            outputModuleName = streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule = getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path = getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType = streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType = tmpstreamType.replace("NANOEDM","NANO")
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result += "\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
        """Add selected standard sequences to the process"""

        if self._options.pileup:
            pileupSpec = self._options.pileup.split(',')[0]

            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

            if '.' in pileupSpec:
                mixingDict = {'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict = {'file':pileupSpec[5:]}
                mixingDict = copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets += ',mix'
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F'] = filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F'] = (filesFromList(self._options.pileup_input[9:]))[0]
                        mixingDict['F'] = self._options.pileup_input.split(',')
            for command in specialization:
                self.executeAndRemember(command)
            if len(mixingDict)!=0:
                raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
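            # Illustrative sketch (not part of the original source; scenario name, key and dataset
            # are hypothetical): an option such as
            #   --pileup MyMixScenario,{'N': 10} --pileup_input das:/SomeMinBias/Sample/GEN-SIM
            # copies Mixing['MyMixScenario'], overrides its 'N' entry with 10, and fills
            # mixingDict['F'] with the files returned by the DAS query.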
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                self.loadAndRemember(self.SimGeometryCFF)
                if self.geometryDBLabel:
                    self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
            print "Geometry option",self._options.geometry,"unknown."

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent = '%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName = stepName[2:]
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands += 'keep *_randomEngineStateProducer_*_*,'
                    self._options.inputCommands = 'keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands = cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
805 """Add conditions to the process""" 806 if not self._options.conditions:
return 808 if 'FrontierConditions_GlobalTag' in self._options.conditions:
809 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 810 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
812 self.loadAndRemember(self.ConditionsDefaultCFF)
814 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
815 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
816 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
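        # Illustrative sketch (not part of the original source): with e.g. --conditions auto:run2_data
        # the dumped configuration ends up containing the two remembered lines
        #   from Configuration.AlCa.GlobalTag import GlobalTag
        #   process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_data', '')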
820 """Include the customise code """ 824 for c
in self._options.customisation_file:
825 custOpt.extend(c.split(
","))
827 for c
in self._options.customisation_file_unsch:
828 custOpt.extend(c.split(
","))
834 raise Exception(
"more than . in the specification:"+opt)
835 fileName=opt.split(
'.')[0]
836 if opt.count(
'.')==0: rest=
'customise' 838 rest=opt.split(
'.')[1]
839 if rest==
'py': rest=
'customise' 841 if fileName
in custMap:
842 custMap[fileName].extend(rest.split(
'+'))
844 custMap[fileName]=rest.split(
'+')
849 final_snippet=
'\n# customisation of the process.\n' 853 allFcn.extend(custMap[opt])
855 if allFcn.count(fcn)!=1:
856 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
860 packageName = f.replace(
".py",
"").
replace(
"/",
".")
861 __import__(packageName)
862 package = sys.modules[packageName]
865 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
867 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 868 if self._options.inline_custom:
869 for line
in file(customiseFile,
'r'): 870 if "import FWCore.ParameterSet.Config" in line:
872 final_snippet += line
874 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
875 for fcn
in custMap[f]:
876 print "customising the process with",fcn,
"from",f
877 if not hasattr(package,fcn):
879 raise Exception(
"config "+f+
" has no function "+fcn)
881 self.process=getattr(package,fcn)(self.process)
883 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
884 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
887 final_snippet +=
'\n# End of customisation functions\n' 893 final_snippet=
'\n# Customisation from command line\n' 894 if self._options.customise_commands:
896 for com
in self._options.customise_commands.split(
'\\n'):
897 com=string.lstrip(com)
899 final_snippet +=
'\n'+com
        if len(self.stepMap):
            if self._options.particleTable not in defaultOptions.particleTableList:
                print 'Invalid particle table provided. Options are:'
                print defaultOptions.particleTable
        if len(self.stepMap):
            self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.EIDefaultCFF = None
        self.SKIMDefaultCFF = "Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF = "Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF = "Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF = "Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF = "Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF = "Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
                self.L1EMDefaultCFF = 'Configuration/StandardSequences/SimL1EmulatorDM_cff'

        self.ALCADefaultSeq = None
        self.LHEDefaultSeq = 'externalLHEProducer'
        self.GENDefaultSeq = 'pgen'
        self.SIMDefaultSeq = 'psim'
        self.DIGIDefaultSeq = 'pdigi'
        self.DATAMIXDefaultSeq = None
        self.DIGI2RAWDefaultSeq = 'DigiToRaw'
        self.HLTDefaultSeq = 'GRun'
        self.L1DefaultSeq = None
        self.L1REPACKDefaultSeq = 'GT'
        self.HARVESTINGDefaultSeq = None
        self.ALCAHARVESTDefaultSeq = None
        self.CFWRITERDefaultSeq = None
        self.RAW2DIGIDefaultSeq = 'RawToDigi'
        self.L1RecoDefaultSeq = 'L1Reco'
        self.L1TrackTriggerDefaultSeq = 'L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq = 'reconstruction'
            self.RECODefaultSeq = 'reconstruction_fromRECO'
        self.RECOSIMDefaultSeq = 'recosim'
        self.EIDefaultSeq = 'top'
        self.POSTRECODefaultSeq = None
        self.L1HwValDefaultSeq = 'L1HwVal'
        self.DQMDefaultSeq = 'DQMOffline'
        self.VALIDATIONDefaultSeq = ''
        self.ENDJOBDefaultSeq = 'endOfProcess'
        self.REPACKDefaultSeq = 'DigiToRawRepack'
        self.PATDefaultSeq = 'miniAOD'
        self.PATGENDefaultSeq = 'miniGEN'
        self.NANODefaultSeq = 'nanoSequence'

        self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot = VtxSmearedDefaultKey

        if self._options.isMC==True:
            self.RECODefaultCFF = "Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF = "Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF = "Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq = 'nanoSequenceMC'
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq = 'fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup = 'Cosmics'
            self.DIGIDefaultCFF = "Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq = 'reconstructionCosmics'
            self.DQMDefaultSeq = 'DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot = VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq = ''
            self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq = 'reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq = 'DQMOfflineHeavyIons'
            self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq = ','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq = 'user'
        self.USERDefaultCFF = None

        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
                self._options.magField = 'AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        self.GeometryCFF = 'Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel = None
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF = 'FastSimulation/Configuration/Geometries_START_cff'
                self.GeometryCFF = 'FastSimulation/Configuration/Geometries_MC_cff'

            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]

        geoms = self._options.geometry.split(',')
            if '/' in geoms[1] or '_cff' in geoms[1]:
                self.GeometryCFF = geoms[1]
                self.GeometryCFF = 'Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            self.SimGeometryCFF = 'Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel = geoms[0][3:]
            if '/' in geoms[0] or '_cff' in geoms[0]:
                self.SimGeometryCFF = geoms[0]
                simGeometry = geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF = 'Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    self.SimGeometryCFF = 'Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF = "Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF = "Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot = 'NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF = 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF = "FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup = MixingDefaultKey

        if self._options.isData:
            self._options.pileup = None

        self.REDIGIDefaultSeq = self.DIGIDefaultSeq
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents = cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset = getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName = cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize = cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

        if ( len(sequence.split('.'))==1 ):
        elif ( len(sequence.split('.'))==2 ):
            sequence = sequence.split('.')[1]
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"

            for i,s in enumerate(seq.split('*')):
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                    p = getattr(self.process,prefix)
                    p += getattr(self.process, s)
            self.schedule.append(getattr(self.process,prefix))
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
                for i,s in enumerate(seq.split('+')):
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
1235 """ Enrich the process with alca streams """ 1237 sequence = sequence.split(
'.')[-1]
1240 alcaList = sequence.split(
"+")
1242 from Configuration.AlCa.autoAlca
import autoAlca
1246 for name
in alcaConfig.__dict__:
1247 alcastream = getattr(alcaConfig,name)
1248 shortName = name.replace(
'ALCARECOStream',
'')
1249 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1250 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1251 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1252 self.AlCaPaths.append(shortName)
1253 if 'DQM' in alcaList:
1254 if not self._options.inlineEventContent
and hasattr(self.process,name):
1255 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1257 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1260 if self._options.hltProcess
or 'HLT' in self.stepMap:
1261 if isinstance(alcastream.paths,tuple):
1262 for path
in alcastream.paths:
1267 for i
in range(alcaList.count(shortName)):
1268 alcaList.remove(shortName)
1271 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1272 path = getattr(alcaConfig,name)
1273 self.schedule.append(path)
1274 alcaList.remove(
'DQM')
1276 if isinstance(alcastream,cms.Path):
1278 self.blacklist_paths.append(alcastream)
1281 if len(alcaList) != 0:
1283 for name
in alcaConfig.__dict__:
1284 alcastream = getattr(alcaConfig,name)
1285 if isinstance(alcastream,cms.FilteredStream):
1286 available.append(name.replace(
'ALCARECOStream',
''))
1287 print "The following alcas could not be found "+
str(alcaList)
1288 print "available ",available
1290 raise Exception(
"The following alcas could not be found "+
str(alcaList))
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets += ','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1310 """ load the fragment of generator configuration """ 1315 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1317 if not '/' in loadFragment:
1318 loadFragment=
'Configuration.Generator.'+loadFragment
1320 loadFragment=loadFragment.replace(
'/',
'.')
1322 print "Loading generator fragment from",loadFragment
1323 __import__(loadFragment)
1327 if not (self._options.filein
or self._options.dasquery):
1328 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1331 generatorModule=sys.modules[loadFragment]
1332 genModules=generatorModule.__dict__
1335 if self.LHEDefaultSeq
in genModules:
1336 del genModules[self.LHEDefaultSeq]
1338 if self._options.hideGen:
1339 self.loadAndRemember(loadFragment)
1341 self.process.load(loadFragment)
1343 import FWCore.ParameterSet.Modules
as cmstypes
1344 for name
in genModules:
1345 theObject = getattr(generatorModule,name)
1346 if isinstance(theObject, cmstypes._Module):
1347 self._options.inlineObjets=name+
','+self._options.inlineObjets
1348 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1349 self._options.inlineObjets+=
','+name
1351 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1352 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1353 self.productionFilterSequence =
'ProductionFilterSequence' 1354 elif 'generator' in genModules:
1355 self.productionFilterSequence =
'generator' 1357 """ Enrich the schedule with the rest of the generation step """ 1358 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1359 genSeqName=sequence.split(
'.')[-1]
1363 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1364 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1365 self.loadAndRemember(cffToBeLoaded)
1367 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1369 if self._options.scenario ==
'HeavyIons':
1370 if self._options.pileup==
'HiMixGEN':
1371 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1373 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1375 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1376 self.schedule.append(self.process.generation_step)
1379 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1381 if 'reGEN' in self.stepMap:
1385 """ Enrich the schedule with the summary of the filter step """ 1387 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1388 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1392 """ Enrich the schedule with the simulation step""" 1393 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1394 if not self._options.fast:
1395 if self._options.gflash==
True:
1396 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1398 if self._options.magField==
'0T':
1399 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1401 if self._options.magField==
'0T':
1402 self.executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1404 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1408 """ Enrich the schedule with the digitisation step""" 1411 if self._options.gflash==
True:
1412 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1414 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1415 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1417 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1418 if self._options.inputEventContent==
'':
1419 self._options.inputEventContent=
'REGEN' 1421 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1428 """ Enrich the schedule with the crossing frame writer step""" 1434 """ Enrich the schedule with the digitisation step""" 1438 if self._options.pileup_input:
1440 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1441 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1442 elif self._options.pileup_input.startswith(
"filelist:"):
1443 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1445 theFiles=self._options.pileup_input.split(
',')
1447 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1462 """ Enrich the schedule with the L1 simulation step""" 1463 assert(sequence ==
None)
1469 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1470 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1471 if sequence
in supported:
1472 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1473 if self._options.scenario ==
'HeavyIons':
1474 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1475 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1477 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1482 """ Enrich the schedule with the HLT simulation step""" 1484 print "no specification of the hlt menu has been given, should never happen" 1485 raise Exception(
'no HLT sequence provided')
1489 from Configuration.HLT.autoHLT
import autoHLT
1492 sequence = autoHLT[key]
1494 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1500 if self._options.scenario ==
'HeavyIons':
1501 optionsForHLT[
'type'] =
'HIon' 1503 optionsForHLT[
'type'] =
'GRun' 1504 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1505 if sequence ==
'run,fromSource':
1506 if hasattr(self.process.source,
'firstRun'):
1507 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1508 elif hasattr(self.process.source,
'setRunNumber'):
1509 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1511 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1513 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1517 if self._options.isMC:
1518 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1520 if self._options.name !=
'HLT':
1521 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1522 self.additionalCommands.append(
'process = ProcessName(process)')
1523 self.additionalCommands.append(
'')
1524 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1527 self.schedule.append(self.process.HLTSchedule)
1528 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1531 if self._options.fast:
1532 if not hasattr(self.process,
'HLTEndSequence'):
1533 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
            seqReco = sequence.split(',')[1]
            seqDigi = sequence.split(',')[0]
            print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))

        ''' Enrich the schedule with L1 HW validation '''
        print '\n\n\n DEPRECATED this has no action \n\n\n'

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with a user defined filter sequence '''
        filterConfig = self.load(sequence.split('.')[0])
        filterSeq = sequence.split('.')[-1]

                label = visitee.label()

        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets += ','+expander.inliner
        self._options.inlineObjets += ','+filterSeq

        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional = True

        self.productionFilterSequence = filterSeq

        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with PATGEN '''
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")
        ''' Enrich the schedule with NANO '''
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
            raise Exception('Cannot set %s event interpretation'%( sequence) )
        self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
        ''' Enrich the schedule with skimming fragments'''
        sequence = sequence.split('.')[-1]
        skimlist = sequence.split('+')

        from Configuration.Skimming.autoSkim import autoSkim

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
            elif (shortname in skimlist):
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                        )
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
        ''' Enrich the schedule with a user defined sequence '''

        """ Enrich the schedule with the postreco step """

        print sequence,"in preparing validation"
        from Validation.Configuration.autoValidation import autoValidation
        sequence = sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName = sequence.split(',')[0].split('+')
            valSeqName = sequence.split(',')[1].split('+')
            prevalSeqName = sequence.split('+')
            valSeqName = sequence.split('+')
            postfix = '_'+sequence
            prevalSeqName = ['prevalidation'+postfix]
            valSeqName = ['validation'+postfix]
            if not hasattr(self.process,valSeqName[0]):
                valSeqName = [sequence]

        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:

        for (i,s) in enumerate(prevalSeqName):
                setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
                self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds = True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1823 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1824 It will climb down within PSets, VPSets and VInputTags to find its target""" 1825 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1832 if isinstance(pset, cms._Parameterizable):
1833 for name
in pset.parameters_().
keys():
1839 value = getattr(pset,name)
1840 type = value.pythonTypeName()
1841 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1842 self.
doIt(value,base+
"."+name)
1843 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1844 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1845 elif type
in (
'cms.string',
'cms.untracked.string'):
1847 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1849 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1850 for (i,n)
in enumerate(value):
1851 if not isinstance(n, cms.InputTag):
1855 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1858 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1859 for (i,n)
in enumerate(value):
1862 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1864 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1865 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1870 label = visitee.label()
1871 except AttributeError:
1872 label =
'<Module not in a Process>' 1874 label =
'other execption' 1875 self.
doIt(visitee, label)
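    # --- Illustration (editor's note, not part of the original file) ---------
    # The visitor is applied through cms.Sequence/cms.Path.visit(); the builder
    # itself emits exactly this kind of call into the generated configuration
    # (see renameHLTprocessInSequence below), e.g. to retarget a sequence from
    # the 'HLT' process name to a rerun HLT process:
    #
    #     from Configuration.Applications.ConfigBuilder import ConfigBuilder
    #     process.validation.visit(
    #         ConfigBuilder.MassSearchReplaceProcessNameVisitor(
    #             "HLT", "reHLT", whitelist=("subSystemFolder",)))
    #
    # 'process.validation' and 'reHLT' are placeholders; any scheduled sequence
    # and target process name can be used.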
1882 print "Replacing all InputTag %s => %s"%(oldT,newT)
1885 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1886 if not loadMe
in self.additionalCommands:
1887 self.additionalCommands.append(loadMe)
1888 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1892 if self._options.hltProcess:
1893 proc=self._options.hltProcess
1895 proc=self.process.name_()
1896 if proc==HLTprocess:
return 1898 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1900 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1901 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1902 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self, seqList, mapping, index=None):
        # expand '@'-prefixed aliases in seqList in place, using the given
        # mapping, until nothing is left to expand (bounded by maxLevel)
        # ...
        while '@' in repr(seqList) and level < maxLevel:
            # ...
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location = specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map " + location + " from " + repr(mapping))
                    mappedTo = mapping[location]
                    if index is not None:
                        mappedTo = mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    # ...
        if level == maxLevel:
            raise Exception("Could not fully expand " + repr(seqList) + " from " + repr(mapping))
    def prepare_DQM(self, sequence = 'DQMOffline'):
        # ...
        sequenceList = sequence.split('.')[-1].split('+')
        postSequenceList = sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # ...
        if len(set(sequenceList)) != len(sequenceList):
            sequenceList = list(set(sequenceList))
            print "Duplicate entries for DQM, using", sequenceList
        # ...
        pathName = 'dqmoffline_step'
        for (i, sequence) in enumerate(sequenceList):
            if (i != 0):
                pathName = 'dqmoffline_%d_step' % (i)
            # ...
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(sequence)
            # ...
            setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
            self.schedule.append(getattr(self.process, pathName))
            # ...
            if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                # put the filtering path in the schedule
                getattr(self.process, pathName).insert(0, self.process.genstepfilter)

        pathName = 'dqmofflineOnPAT_step'
        for (i, sequence) in enumerate(postSequenceList):
            if (i != 0):
                pathName = 'dqmofflineOnPAT_%d_step' % (i)
            setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
            self.schedule.append(getattr(self.process, pathName))
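    # --- Illustration (editor's note, not part of the original file) ---------
    # For a driver option such as DQM:@common+@muon (aliases expanded through
    # autoDQM), the loops above produce EndPaths named
    #     dqmoffline_step, dqmoffline_1_step, ...
    # and, for the second (OnPAT) set of sequences,
    #     dqmofflineOnPAT_step, dqmofflineOnPAT_1_step, ...
    # all appended to the schedule in order.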
1962 """ Enrich the process with harvesting step """ 1963 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1967 sequence = sequence.split(
'.')[-1]
1970 harvestingList = sequence.split(
"+")
1971 from DQMOffline.Configuration.autoDQM
import autoDQM
1972 from Validation.Configuration.autoValidation
import autoValidation
1974 combined_mapping = copy.deepcopy( autoDQM )
1975 combined_mapping.update( autoValidation )
1976 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1978 if len(set(harvestingList))!=len(harvestingList):
1979 harvestingList=
list(set(harvestingList))
1980 print "Duplicate entries for HARVESTING, using",harvestingList
1982 for name
in harvestingList:
1983 if not name
in harvestingConfig.__dict__:
1984 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1986 harvestingstream = getattr(harvestingConfig,name)
1987 if isinstance(harvestingstream,cms.Path):
1988 self.schedule.append(harvestingstream)
1989 self.blacklist_paths.append(harvestingstream)
1990 if isinstance(harvestingstream,cms.Sequence):
1991 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1992 self.schedule.append(getattr(self.process,name+
"_step"))
1998 """ Enrich the process with AlCaHarvesting step """ 2000 sequence=sequence.split(
".")[-1]
2003 harvestingList = sequence.split(
"+")
2007 from Configuration.AlCa.autoPCL
import autoPCL
2010 for name
in harvestingConfig.__dict__:
2011 harvestingstream = getattr(harvestingConfig,name)
2012 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2013 self.schedule.append(harvestingstream)
2014 if type(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput")) == cms.VPSet
and \
2015 type(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata")) == cms.VPSet:
2016 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2017 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2019 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2020 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2021 harvestingList.remove(name)
2023 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2024 self.schedule.append(lastStep)
2026 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2027 print "The following harvesting could not be found : ", harvestingList
2028 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
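    # --- Illustration (editor's note, not part of the original file) ---------
    # executeAndRemember both applies the command to the in-memory process and
    # records it, so the dumped configuration ends up containing lines such as
    #
    #     process.PoolDBOutputService.toPut.append(process.ALCAHARVESTBeamSpotByRun_dbOutput)
    #     process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVESTBeamSpotByRun_metadata)
    #
    # where 'BeamSpotByRun' stands for whichever PCL harvesting name was
    # requested (the concrete name above is only an example).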
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)
2043 """ Add useful info for the production. """ 2044 self.process.configurationMetadata=cms.untracked.PSet\
2045 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2046 name=cms.untracked.string(
"Applications"),
2047 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2050 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
    def create_process(self):
        self.pythonCfgCode += "# using: \n# " + __version__[1:-1] + "\n# " + __source__[1:-1] + '\n'
        self.pythonCfgCode += "# with command line options: " + self._options.arguments + '\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
        # ...
        modifierImports = ['from Configuration.StandardSequences.Eras import eras']
        # ...
        if hasattr(self._options, "era") and self._options.era:
            # several eras can be given as a comma-separated list
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(","):
                modifierStrings.append("eras." + requestedEra)
                modifiers.append(getattr(eras, requestedEra))
        # ...
        if hasattr(self._options, "procModifiers") and self._options.procModifiers:
            import importlib
            for pm in self._options.procModifiers.split(','):
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.' + pm + '_cff import ' + pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.' + pm + '_cff'), pm))
        # ...
        self.pythonCfgCode += "process = cms.Process('" + self._options.name + "'"
        # ...
        if len(modifierStrings) > 0:
            # ...
        # ...
        if self.process == None:
            if len(modifiers) > 0:
                self.process = cms.Process(self._options.name, *modifiers)
            else:
                self.process = cms.Process(self._options.name)
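    # --- Illustration (editor's note, not part of the original file) ---------
    # The string assembled in pythonCfgCode becomes the head of the dumped
    # configuration; with e.g. --era Run2_2017 and a process name 'RECO' it
    # reads roughly
    #
    #     # with command line options: ...
    #     import FWCore.ParameterSet.Config as cms
    #
    #     from Configuration.StandardSequences.Eras import eras
    #
    #     process = cms.Process('RECO',eras.Run2_2017)
    #
    # while the in-memory process is created in parallel with the same
    # modifiers.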
2100 """ Prepare the configuration string and add missing pieces.""" 2112 outputModuleCfgCode=
"" 2113 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2118 self.
pythonCfgCode +=
"# import of standard configurations\n" 2119 for module
in self.imports:
2123 if not hasattr(self.process,
"configurationMetadata"):
2127 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2130 for comment,object
in self.addedObjects:
2142 nl=self.additionalOutputs.keys()
2145 output = self.additionalOutputs[name]
2146 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2147 tmpOut = cms.EndPath(output)
2148 setattr(self.process,name+
'OutPath',tmpOut)
2149 self.schedule.append(tmpOut)
        # ...
        for command in self.additionalCommands:
            # ...
        # ...
        for object in self._options.inlineObjets.split(','):
            # ...
            if not hasattr(self.process, object):
                print 'cannot inline -' + object + '- : not known'
            # ...
        # ...
        for path in self.process.paths:
            if getattr(self.process, path) not in self.blacklist_paths:
                # ...
        for endpath in self.process.endpaths:
            if getattr(self.process, endpath) not in self.blacklist_paths:
                # ...
        # ...
        # dump the schedule
        result = "process.schedule = cms.Schedule("
        # ...
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)
        if hasattr(self.process, "HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule) + 1:]
            pathNames = ['process.' + p.label_() for p in beforeHLT]
            result += ','.join(pathNames) + ')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.' + p.label_() for p in afterHLT]
            result += 'process.schedule.extend([' + ','.join(pathNames) + '])\n'
        else:
            pathNames = ['process.' + p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule(' + ','.join(pathNames) + ')\n'
        # ...
        for labelToAssociate in self.labelsToAssociate:
            self.process.schedule.associate(getattr(self.process, labelToAssociate))
            self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
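        # --- Illustration (editor's note, not part of the original file) -----
        # The 'result' string written to the config therefore looks like
        #
        #     process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step)
        #
        # or, when an HLTSchedule is present,
        #
        #     process.schedule = cms.Schedule(process.generation_step)
        #     process.schedule.extend(process.HLTSchedule)
        #     process.schedule.extend([process.endjob_step,process.RECOSIMoutput_step])
        #
        # with the actual path labels depending on the requested steps.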
        # ...
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
        # ...
        if self._options.nThreads != "1":
            # ...
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32(" + self._options.nThreads + ")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(0)\n"
        # ...
        if self._options.isRepacked:
            # ...
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            # ...
            MassReplaceInputTag(self.process)
        # ...
        # special treatment of the production filter sequence
        if self.productionFilterSequence:
            self.pythonCfgCode += '# filter all path with the production filter sequence\n'
            # ...
            if len(self.conditionalPaths):
                self.pythonCfgCode += '\tif not path in %s: continue\n' % str(self.conditionalPaths)
            if len(self.excludedPaths):
                self.pythonCfgCode += '\tif path in %s: continue\n' % str(self.excludedPaths)
            self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n' % (self.productionFilterSequence,)
            pfs = getattr(self.process, self.productionFilterSequence)
            for path in self.process.paths:
                if not path in self.conditionalPaths: continue
                if path in self.excludedPaths: continue
                getattr(self.process, path)._seq = pfs * getattr(self.process, path)._seq
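        # --- Illustration (editor's note, not part of the original file) -----
        # The pythonCfgCode fragment assembled above reproduces, inside the
        # dumped configuration, the same loop that is executed here on the
        # live process, i.e. something like
        #
        #     # filter all path with the production filter sequence
        #     for path in process.paths:
        #         if not path in ['generation_step']: continue
        #         getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq
        #
        # ('generation_step' and 'ProductionFilterSequence' are typical values,
        # not fixed names).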
        if self._options.runUnscheduled:
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            # ...
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            # ...
        # ...
        if hasattr(self.process, "logErrorHarvester"):
            # ...
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            # ...
        # ...
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        # ...
        # write a small .io file summarising inputs and outputs, if requested
        if self._options.io:
            # ...
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io = open(self._options.io, 'w')
            ioJson = {}
            if hasattr(self.process.source, "fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source, "secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o, om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter'] = self.productionFilterSequence
            # ...
            io.write(json.dumps(ioJson))
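        # --- Illustration (editor's note, not part of the original file) -----
        # The resulting .io file is a small JSON summary of the job I/O, e.g.
        #
        #     {"primary": ["/store/.../step2.root"],
        #      "RECOSIMoutput": "step3.root",
        #      "GT": "auto:phase1_2017_realistic",
        #      "filter": "ProductionFilterSequence"}
        #
        # (the keys present depend on the source, output modules and options
        # used; the values above are placeholders).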