3 __version__ =
"$Revision: 1.19 $" 4 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 6 import FWCore.ParameterSet.Config
as cms
7 from FWCore.ParameterSet.Modules
import _Module
12 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
17 from subprocess
import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes
as DictTypes
24 defaultOptions.datamix =
'DataOnSim' 25 defaultOptions.isMC=
False 26 defaultOptions.isData=
True 27 defaultOptions.step=
'' 28 defaultOptions.pileup=
'NoPileUp' 29 defaultOptions.pileup_input =
None 30 defaultOptions.pileup_dasoption =
'' 31 defaultOptions.geometry =
'SimDB' 32 defaultOptions.geometryExtendedOptions = [
'ExtendedGFlash',
'Extended',
'NoCastor']
33 defaultOptions.magField =
'' 34 defaultOptions.conditions =
None 35 defaultOptions.scenarioOptions=[
'pp',
'cosmics',
'nocoll',
'HeavyIons']
36 defaultOptions.harvesting=
'AtRunEnd' 37 defaultOptions.gflash =
False 38 defaultOptions.number = -1
39 defaultOptions.number_out =
None 40 defaultOptions.arguments =
"" 41 defaultOptions.name =
"NO NAME GIVEN" 42 defaultOptions.evt_type =
"" 43 defaultOptions.filein =
"" 44 defaultOptions.dasquery=
"" 45 defaultOptions.dasoption=
"" 46 defaultOptions.secondfilein =
"" 47 defaultOptions.customisation_file = []
48 defaultOptions.customisation_file_unsch = []
49 defaultOptions.customise_commands =
"" 50 defaultOptions.inline_custom=
False 51 defaultOptions.particleTable =
'pythiapdt' 52 defaultOptions.particleTableList = [
'pythiapdt',
'pdt']
53 defaultOptions.dirin =
'' 54 defaultOptions.dirout =
'' 55 defaultOptions.filetype =
'EDM' 56 defaultOptions.fileout =
'output.root' 57 defaultOptions.filtername =
'' 58 defaultOptions.lazy_download =
False 59 defaultOptions.custom_conditions =
'' 60 defaultOptions.hltProcess =
'' 61 defaultOptions.eventcontent =
None 62 defaultOptions.datatier =
None 63 defaultOptions.inlineEventContent =
True 64 defaultOptions.inlineObjets =
'' 65 defaultOptions.hideGen=
False 66 from Configuration.StandardSequences.VtxSmeared
import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
67 defaultOptions.beamspot=
None 68 defaultOptions.outputDefinition =
'' 69 defaultOptions.inputCommands =
None 70 defaultOptions.outputCommands =
None 71 defaultOptions.inputEventContent =
'' 72 defaultOptions.dropDescendant =
False 73 defaultOptions.relval =
None 74 defaultOptions.profile =
None 75 defaultOptions.isRepacked =
False 76 defaultOptions.restoreRNDSeeds =
False 77 defaultOptions.donotDropOnInput =
'' 78 defaultOptions.python_filename =
'' 79 defaultOptions.io=
None 80 defaultOptions.lumiToProcess=
None 81 defaultOptions.fast=
False 82 defaultOptions.runsAndWeightsForMC =
None 83 defaultOptions.runsScenarioForMC =
None 84 defaultOptions.runUnscheduled =
False 85 defaultOptions.timeoutOutput =
False 86 defaultOptions.nThreads =
'1' 90 theObject = getattr(process,name)
91 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
92 return "process."+name+
" = " + theObject.dumpPython(
"process")
93 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
94 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 96 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 99 import FWCore.ParameterSet.Config
as cms
102 for line
in open(fileName,
'r'): 103 if line.count(
".root")>=2:
105 entries=line.replace(
"\n",
"").
split()
106 prim.append(entries[0])
107 sec.append(entries[1])
108 elif (line.find(
".root")!=-1):
109 entry=line.replace(
"\n",
"")
112 prim = sorted(
list(set(prim)))
113 sec = sorted(
list(set(sec)))
115 if not hasattr(s,
"fileNames"):
116 s.fileNames=cms.untracked.vstring(prim)
118 s.fileNames.extend(prim)
120 if not hasattr(s,
"secondaryFileNames"):
121 s.secondaryFileNames=cms.untracked.vstring(sec)
123 s.secondaryFileNames.extend(sec)
124 print "found files: ",prim
126 raise Exception(
"There are not files in input from the file list")
128 print "found parent files:",sec
133 import FWCore.ParameterSet.Config
as cms
136 print "the query is",query
139 while eC!=0
and count<3:
141 print 'Sleeping, then retrying DAS' 143 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True)
145 tupleP = os.waitpid(p.pid, 0)
149 print "DAS succeeded after",count,
"attempts",eC
151 print "DAS failed 3 times- I give up" 152 for line
in pipe.split(
'\n'):
153 if line.count(
".root")>=2:
155 entries=line.replace(
"\n",
"").
split()
156 prim.append(entries[0])
157 sec.append(entries[1])
158 elif (line.find(
".root")!=-1):
159 entry=line.replace(
"\n",
"")
162 prim = sorted(
list(set(prim)))
163 sec = sorted(
list(set(sec)))
165 if not hasattr(s,
"fileNames"):
166 s.fileNames=cms.untracked.vstring(prim)
168 s.fileNames.extend(prim)
170 if not hasattr(s,
"secondaryFileNames"):
171 s.secondaryFileNames=cms.untracked.vstring(sec)
173 s.secondaryFileNames.extend(sec)
174 print "found files: ",prim
176 print "found parent files:",sec
179 def anyOf(listOfKeys,dict,opt=None):
188 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
191 """The main building routines """ 193 def __init__(self, options, process = None, with_output = False, with_input = False ):
194 """options taken from old cmsDriver and optparse """ 196 options.outfile_name = options.dirout+options.fileout
200 if self._options.isData
and options.isMC:
201 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
206 if 'ENDJOB' in self._options.step:
207 if (hasattr(self.
_options,
"outputDefinition")
and \
208 self._options.outputDefinition !=
'' and \
209 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
210 (hasattr(self.
_options,
"datatier")
and \
211 self._options.datatier
and \
212 'DQMIO' in self._options.datatier):
213 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 214 self._options.step=self._options.step.replace(
',ENDJOB',
'')
219 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
222 for step
in self._options.step.split(
","):
223 if step==
'':
continue 224 stepParts = step.split(
":")
225 stepName = stepParts[0]
226 if stepName
not in stepList
and not stepName.startswith(
're'):
227 raise ValueError(
"Step "+stepName+
" unknown")
228 if len(stepParts)==1:
229 self.stepMap[stepName]=
"" 230 elif len(stepParts)==2:
231 self.stepMap[stepName]=stepParts[1].
split(
'+')
232 elif len(stepParts)==3:
233 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
235 raise ValueError(
"Step definition "+step+
" invalid")
236 self.stepKeys.append(stepName)
240 self.with_output = with_output
243 if hasattr(self.
_options,
"no_output_flag")
and self._options.no_output_flag:
244 self.with_output =
False 245 self.with_input = with_input
249 self.schedule =
list()
255 self.additionalCommands = []
257 self.blacklist_paths = []
258 self.addedObjects = []
259 self.additionalOutputs = {}
261 self.productionFilterSequence =
None 262 self.labelsToAssociate=[]
263 self.nextScheduleIsConditional=
False 264 self.conditionalPaths=[]
265 self.excludedPaths=[]
270 Function to add the igprof profile service so that you can dump in the middle 273 profileOpts = self._options.profile.split(
':')
275 profilerInterval = 100
276 profilerFormat =
None 277 profilerJobFormat =
None 283 startEvent = profileOpts.pop(0)
284 if not startEvent.isdigit():
285 raise Exception(
"%s is not a number" % startEvent)
286 profilerStart =
int(startEvent)
288 eventInterval = profileOpts.pop(0)
289 if not eventInterval.isdigit():
290 raise Exception(
"%s is not a number" % eventInterval)
291 profilerInterval =
int(eventInterval)
293 profilerFormat = profileOpts.pop(0)
296 if not profilerFormat:
297 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
299 self._options.pileup,
300 self._options.conditions,
301 self._options.datatier,
302 self._options.profileTypeLabel)
303 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
304 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
305 elif not profilerJobFormat:
306 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 308 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
311 includeFile = includeFile.replace(
'/',
'.')
312 self.process.load(includeFile)
313 return sys.modules[includeFile]
316 """helper routine to load am memorize imports""" 319 includeFile = includeFile.replace(
'/',
'.')
320 self.imports.append(includeFile)
321 self.process.load(includeFile)
322 return sys.modules[includeFile]
325 """helper routine to remember replace statements""" 326 self.additionalCommands.append(command)
327 if not command.strip().startswith(
"#"):
330 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
334 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
335 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
337 self.process.options = cms.untracked.PSet( )
339 self.addedObjects.append((
"",
"options"))
341 if self._options.lazy_download:
342 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
343 stats = cms.untracked.bool(
True),
344 enable = cms.untracked.bool(
True),
345 cacheHint = cms.untracked.string(
"lazy-download"),
346 readHint = cms.untracked.string(
"read-ahead-buffered")
348 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
353 if self._options.profile:
355 self.process.IgProfService = cms.Service(
"IgProfService",
356 reportFirstEvent = cms.untracked.int32(start),
357 reportEventInterval = cms.untracked.int32(interval),
358 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
359 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
360 self.addedObjects.append((
"Setup IGProf Service for profiling",
"IgProfService"))
363 """Here we decide how many evts will be processed""" 364 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
365 if self._options.number_out:
366 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
367 self.addedObjects.append((
"",
"maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
394 if self._options.filein
or self._options.dasquery:
395 if self._options.filetype ==
"EDM":
396 self.process.source=cms.Source(
"PoolSource",
397 fileNames = cms.untracked.vstring(),
398 secondaryFileNames= cms.untracked.vstring())
400 elif self._options.filetype ==
"DAT":
401 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
403 elif self._options.filetype ==
"LHE":
404 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
405 if self._options.filein.startswith(
"lhe:"):
407 args=self._options.filein.split(
':')
409 print 'LHE input from article ',article
410 location=
'/store/lhe/' 412 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
413 for line
in textOfFiles:
414 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
415 self.process.source.fileNames.append(location+article+
'/'+fileName)
418 print 'Issue to load LHE files, please check and try again.' 421 if len(self.process.source.fileNames)==0:
422 print 'Issue with empty filename, but can pass line check' 425 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
429 elif self._options.filetype ==
"DQM":
430 self.process.source=cms.Source(
"DQMRootSource",
431 fileNames = cms.untracked.vstring())
434 elif self._options.filetype ==
"DQMDAQ":
436 self.process.source=cms.Source(
"DQMStreamerReader")
439 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
440 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
442 if self._options.dasquery!=
'':
443 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
444 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
446 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
447 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
450 if 'GEN' in self.stepMap.keys():
451 if self._options.inputCommands:
452 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 454 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 456 if self.process.source
and self._options.inputCommands:
457 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
458 for command
in self._options.inputCommands.split(
','):
460 command = command.strip()
461 if command==
'':
continue 462 self.process.source.inputCommands.append(command)
463 if not self._options.dropDescendant:
464 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
466 if self._options.lumiToProcess:
467 import FWCore.PythonUtilities.LumiList
as LumiList
468 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
470 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.stepMap
or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
471 if self.process.source
is None:
472 self.process.source=cms.Source(
"EmptySource")
475 self.runsAndWeights=
None 476 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
477 if not self._options.isMC :
478 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
479 if self._options.runsAndWeightsForMC:
480 self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
482 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
483 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
484 __import__(RunsAndWeights[self._options.runsScenarioForMC])
485 self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
487 self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
489 if self.runsAndWeights:
490 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
492 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
493 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
498 """ Add output module to the process """ 500 if self._options.outputDefinition:
501 if self._options.datatier:
502 print "--datatier & --eventcontent options ignored" 505 outList = eval(self._options.outputDefinition)
506 for (id,outDefDict)
in enumerate(outList):
507 outDefDictStr=outDefDict.__str__()
508 if not isinstance(outDefDict,dict):
509 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
511 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
514 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
515 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
516 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
517 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
518 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
520 if not theModuleLabel:
521 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 525 for name
in tryNames:
526 if not hasattr(self.process,name):
529 if not theModuleLabel:
530 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
532 defaultFileName=self._options.outfile_name
534 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
536 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
537 if not theFileName.endswith(
'.root'):
541 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
542 if theStreamType==
'DQMIO': theStreamType=
'DQM' 543 if theStreamType==
'ALL':
544 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
546 theEventContent = getattr(self.process, theStreamType+
"EventContent")
550 if theStreamType==
'ALCARECO' and not theFilterName:
551 theFilterName=
'StreamALCACombined' 554 CppType=
'PoolOutputModule' 555 if self._options.timeoutOutput:
556 CppType=
'TimeoutPoolOutputModule' 557 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 558 output = cms.OutputModule(CppType,
559 theEventContent.clone(),
560 fileName = cms.untracked.string(theFileName),
561 dataset = cms.untracked.PSet(
562 dataTier = cms.untracked.string(theTier),
563 filterName = cms.untracked.string(theFilterName))
565 if not theSelectEvent
and hasattr(self.process,
'generation_step')
and theStreamType!=
'LHE':
566 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
567 if not theSelectEvent
and hasattr(self.process,
'filtering_step'):
568 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
570 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
573 if not hasattr(output,
'SelectEvents'):
574 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
575 for alca
in self.AlCaPaths:
576 output.SelectEvents.SelectEvents.extend(getattr(self.process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
579 if hasattr(self.process,theModuleLabel):
580 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
582 setattr(self.process,theModuleLabel,output)
583 outputModule=getattr(self.process,theModuleLabel)
584 setattr(self.process,theModuleLabel+
'_step',cms.EndPath(outputModule))
585 path=getattr(self.process,theModuleLabel+
'_step')
586 self.schedule.append(path)
588 if not self._options.inlineEventContent
and hasattr(self.process,theStreamType+
"EventContent"):
591 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
592 if theExtraOutputCommands:
593 if not isinstance(theExtraOutputCommands,list):
594 raise Exception(
"extra ouput command in --option must be a list of strings")
595 if hasattr(self.process,theStreamType+
"EventContent"):
596 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
598 outputModule.outputCommands.extend(theExtraOutputCommands)
600 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
605 streamTypes=self._options.eventcontent.split(
',')
606 tiers=self._options.datatier.split(
',')
607 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
608 raise Exception(
"number of event content arguments does not match number of datatier arguments")
611 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
614 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
615 if streamType==
'':
continue 616 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue 617 if streamType==
'DQMIO': streamType=
'DQM' 618 eventContent=streamType
620 if streamType ==
"NANOEDMAOD" :
621 eventContent =
"NANOAOD" 622 elif streamType ==
"NANOEDMAODSIM" :
623 eventContent =
"NANOAODSIM" 624 theEventContent = getattr(self.process, eventContent+
"EventContent")
626 theFileName=self._options.outfile_name
627 theFilterName=self._options.filtername
629 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
630 theFilterName=self._options.filtername
631 CppType=
'PoolOutputModule' 632 if self._options.timeoutOutput:
633 CppType=
'TimeoutPoolOutputModule' 634 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 635 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 636 output = cms.OutputModule(CppType,
638 fileName = cms.untracked.string(theFileName),
639 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
640 filterName = cms.untracked.string(theFilterName)
643 if hasattr(self.process,
"generation_step")
and streamType!=
'LHE':
644 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
645 if hasattr(self.process,
"filtering_step"):
646 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
648 if streamType==
'ALCARECO':
649 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
651 if "MINIAOD" in streamType:
652 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
655 outputModuleName=streamType+
'output' 656 setattr(self.process,outputModuleName,output)
657 outputModule=getattr(self.process,outputModuleName)
658 setattr(self.process,outputModuleName+
'_step',cms.EndPath(outputModule))
659 path=getattr(self.process,outputModuleName+
'_step')
660 self.schedule.append(path)
662 if self._options.outputCommands
and streamType!=
'DQM':
663 for evct
in self._options.outputCommands.split(
','):
664 if not evct:
continue 665 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
667 if not self._options.inlineEventContent:
668 tmpstreamType=streamType
669 if "NANOEDM" in tmpstreamType :
670 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
673 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
675 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
681 Add selected standard sequences to the process 684 if self._options.pileup:
685 pileupSpec=self._options.pileup.split(
',')[0]
688 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
689 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
690 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
694 if '.' in pileupSpec:
695 mixingDict={
'file':pileupSpec}
696 elif pileupSpec.startswith(
'file:'):
697 mixingDict={
'file':pileupSpec[5:]}
700 mixingDict=copy.copy(Mixing[pileupSpec])
701 if len(self._options.pileup.split(
','))>1:
702 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(
',')+1:]))
705 if 'file:' in pileupSpec:
707 self.process.load(mixingDict[
'file'])
708 print "inlining mixing module configuration" 709 self._options.inlineObjets+=
',mix' 711 self.loadAndRemember(mixingDict[
'file'])
713 mixingDict.pop(
'file')
714 if not "DATAMIX" in self.stepMap.keys():
715 if self._options.pileup_input:
716 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
717 mixingDict[
'F']=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
718 elif self._options.pileup_input.startswith(
"filelist:"):
719 mixingDict[
'F']=(
filesFromList(self._options.pileup_input[9:]))[0]
721 mixingDict[
'F']=self._options.pileup_input.split(
',')
723 for command
in specialization:
724 self.executeAndRemember(command)
725 if len(mixingDict)!=0:
726 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
731 if len(self.stepMap):
732 self.loadAndRemember(self.GeometryCFF)
733 if (
'SIM' in self.stepMap
or 'reSIM' in self.stepMap)
and not self._options.fast:
734 self.loadAndRemember(self.SimGeometryCFF)
735 if self.geometryDBLabel:
736 self.executeAndRemember(
'process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
738 print "Geometry option",self._options.geometry,
"unknown." 741 if len(self.stepMap):
742 self.loadAndRemember(self.magFieldCFF)
744 for stepName
in self.stepKeys:
745 stepSpec = self.stepMap[stepName]
746 print "Step:", stepName,
"Spec:",stepSpec
747 if stepName.startswith(
're'):
749 if stepName[2:]
not in self._options.donotDropOnInput:
750 self._options.inputEventContent=
'%s,%s'%(stepName.upper(),self._options.inputEventContent)
751 stepName=stepName[2:]
753 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
754 elif isinstance(stepSpec, list):
755 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
756 elif isinstance(stepSpec, tuple):
757 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
759 raise ValueError(
"Invalid step definition")
761 if self._options.restoreRNDSeeds!=
False:
763 if self._options.restoreRNDSeeds==
True:
764 self.executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
766 self.executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
767 if self._options.inputEventContent
or self._options.inputCommands:
768 if self._options.inputCommands:
769 self._options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 771 self._options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 775 if self._options.inputEventContent:
777 def dropSecondDropStar(iec):
788 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
789 for evct
in self._options.inputEventContent.split(
','):
790 if evct==
'':
continue 791 theEventContent = getattr(self.process, evct+
"EventContent")
792 if hasattr(theEventContent,
'outputCommands'):
793 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
794 if hasattr(theEventContent,
'inputCommands'):
795 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
797 dropSecondDropStar(self.process.source.inputCommands)
799 if not self._options.dropDescendant:
800 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
806 """Add conditions to the process""" 807 if not self._options.conditions:
return 809 if 'FrontierConditions_GlobalTag' in self._options.conditions:
810 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 811 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
813 self.loadAndRemember(self.ConditionsDefaultCFF)
815 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
816 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
817 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
821 """Include the customise code """ 825 for c
in self._options.customisation_file:
826 custOpt.extend(c.split(
","))
828 for c
in self._options.customisation_file_unsch:
829 custOpt.extend(c.split(
","))
835 raise Exception(
"more than . in the specification:"+opt)
836 fileName=opt.split(
'.')[0]
837 if opt.count(
'.')==0: rest=
'customise' 839 rest=opt.split(
'.')[1]
840 if rest==
'py': rest=
'customise' 842 if fileName
in custMap:
843 custMap[fileName].extend(rest.split(
'+'))
845 custMap[fileName]=rest.split(
'+')
850 final_snippet=
'\n# customisation of the process.\n' 854 allFcn.extend(custMap[opt])
856 if allFcn.count(fcn)!=1:
857 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
861 packageName = f.replace(
".py",
"").
replace(
"/",
".")
862 __import__(packageName)
863 package = sys.modules[packageName]
866 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
868 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 869 if self._options.inline_custom:
870 for line
in file(customiseFile,
'r'): 871 if "import FWCore.ParameterSet.Config" in line:
873 final_snippet += line
875 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
876 for fcn
in custMap[f]:
877 print "customising the process with",fcn,
"from",f
878 if not hasattr(package,fcn):
880 raise Exception(
"config "+f+
" has no function "+fcn)
882 self.process=getattr(package,fcn)(self.process)
884 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
885 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
888 final_snippet +=
'\n# End of customisation functions\n' 894 final_snippet=
'\n# Customisation from command line\n' 895 if self._options.customise_commands:
897 for com
in self._options.customise_commands.split(
'\\n'):
898 com=string.lstrip(com)
900 final_snippet +=
'\n'+com
909 if len(self.stepMap):
911 if self._options.particleTable
not in defaultOptions.particleTableList:
912 print 'Invalid particle table provided. Options are:' 913 print defaultOptions.particleTable
916 if len(self.stepMap):
917 self.
loadAndRemember(
'SimGeneral.HepPDTESSource.'+self._options.particleTable+
'_cfi')
936 self.EIDefaultCFF=
None 937 self.SKIMDefaultCFF=
"Configuration/StandardSequences/Skims_cff" 938 self.POSTRECODefaultCFF=
"Configuration/StandardSequences/PostRecoGenerator_cff" 939 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/Validation_cff" 940 self.L1HwValDefaultCFF =
"Configuration/StandardSequences/L1HwVal_cff" 941 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOffline_cff" 942 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/Harvesting_cff" 943 self.ALCAHARVESTDefaultCFF=
"Configuration/StandardSequences/AlCaHarvesting_cff" 944 self.ENDJOBDefaultCFF=
"Configuration/StandardSequences/EndOfProcess_cff" 945 self.ConditionsDefaultCFF =
"Configuration/StandardSequences/FrontierConditions_GlobalTag_cff" 946 self.CFWRITERDefaultCFF =
"Configuration/StandardSequences/CrossingFrameWriter_cff" 947 self.REPACKDefaultCFF=
"Configuration/StandardSequences/DigiToRaw_Repack_cff" 949 if "DATAMIX" in self.stepMap.keys():
950 self.DATAMIXDefaultCFF=
"Configuration/StandardSequences/DataMixer"+self._options.datamix+
"_cff" 953 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 955 self.ALCADefaultSeq=
None 956 self.LHEDefaultSeq=
'externalLHEProducer' 957 self.GENDefaultSeq=
'pgen' 958 self.SIMDefaultSeq=
'psim' 959 self.DIGIDefaultSeq=
'pdigi' 960 self.DATAMIXDefaultSeq=
None 961 self.DIGI2RAWDefaultSeq=
'DigiToRaw' 962 self.HLTDefaultSeq=
'GRun' 963 self.L1DefaultSeq=
None 964 self.L1REPACKDefaultSeq=
'GT' 965 self.HARVESTINGDefaultSeq=
None 966 self.ALCAHARVESTDefaultSeq=
None 967 self.CFWRITERDefaultSeq=
None 968 self.RAW2DIGIDefaultSeq=
'RawToDigi' 969 self.L1RecoDefaultSeq=
'L1Reco' 970 self.L1TrackTriggerDefaultSeq=
'L1TrackTrigger' 971 if self._options.fast
or (
'RAW2DIGI' in self.stepMap
and 'RECO' in self.stepMap):
972 self.RECODefaultSeq=
'reconstruction' 974 self.RECODefaultSeq=
'reconstruction_fromRECO' 975 self.RECOSIMDefaultSeq=
'recosim' 976 self.EIDefaultSeq=
'top' 977 self.POSTRECODefaultSeq=
None 978 self.L1HwValDefaultSeq=
'L1HwVal' 979 self.DQMDefaultSeq=
'DQMOffline' 980 self.VALIDATIONDefaultSeq=
'' 981 self.ENDJOBDefaultSeq=
'endOfProcess' 982 self.REPACKDefaultSeq=
'DigiToRawRepack' 983 self.PATDefaultSeq=
'miniAOD' 984 self.PATGENDefaultSeq=
'miniGEN' 985 self.NANODefaultSeq=
'nanoSequence' 987 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContent_cff" 989 if not self._options.beamspot:
990 self._options.beamspot=VtxSmearedDefaultKey
993 if self._options.isMC==
True:
995 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 996 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 997 self.PATGENDefaultCFF=
"Configuration/StandardSequences/PATGEN_cff" 998 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineMC_cff" 999 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1000 self.NANODefaultSeq=
'nanoSequenceMC' 1002 self._options.beamspot =
None 1005 if 'reGEN' in self.stepMap:
1006 self.GENDefaultSeq=
'fixGenInfo' 1008 if self._options.scenario==
'cosmics':
1009 self._options.pileup=
'Cosmics' 1010 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1011 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1012 self.SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1013 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContentCosmics_cff" 1014 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/ValidationCosmics_cff" 1015 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineCosmics_cff" 1016 if self._options.isMC==
True:
1017 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineCosmicsMC_cff" 1018 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/HarvestingCosmics_cff" 1019 self.RECODefaultSeq=
'reconstructionCosmics' 1020 self.DQMDefaultSeq=
'DQMOfflineCosmics' 1022 if self._options.scenario==
'HeavyIons':
1023 if not self._options.beamspot:
1024 self._options.beamspot=VtxSmearedHIDefaultKey
1025 self.HLTDefaultSeq =
'HIon' 1026 self.VALIDATIONDefaultCFF=
"Configuration/StandardSequences/ValidationHeavyIons_cff" 1027 self.VALIDATIONDefaultSeq=
'' 1028 self.EVTCONTDefaultCFF=
"Configuration/EventContent/EventContentHeavyIons_cff" 1029 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1030 self.RECODefaultSeq=
'reconstructionHeavyIons' 1031 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1032 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineHeavyIons_cff" 1033 self.DQMDefaultSeq=
'DQMOfflineHeavyIons' 1034 self.SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1035 self.HARVESTINGDefaultCFF=
"Configuration/StandardSequences/HarvestingHeavyIons_cff" 1036 if self._options.isMC==
True:
1037 self.DQMOFFLINEDefaultCFF=
"DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff" 1040 self.RAW2RECODefaultSeq=
','.
join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])
1042 self.USERDefaultSeq=
'user' 1043 self.USERDefaultCFF=
None 1046 if self._options.isData:
1047 if self._options.magField==defaultOptions.magField:
1048 print "magnetic field option forced to: AutoFromDBCurrent" 1049 self._options.magField=
'AutoFromDBCurrent' 1050 self.magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace(
'.',
'')+
'_cff' 1051 self.magFieldCFF = self.magFieldCFF.replace(
"__",
'_')
1054 self.GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1055 self.geometryDBLabel=
None 1057 if self._options.fast:
1058 if 'start' in self._options.conditions.lower():
1059 self.GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1061 self.GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1064 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1065 if opt
in GeometryConf:
1066 return GeometryConf[opt]
1070 geoms=self._options.geometry.split(
',')
1074 if '/' in geoms[1]
or '_cff' in geoms[1]:
1075 self.GeometryCFF=geoms[1]
1077 self.GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1079 if (geoms[0].startswith(
'DB:')):
1080 self.SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1081 self.geometryDBLabel=geoms[0][3:]
1084 if '/' in geoms[0]
or '_cff' in geoms[0]:
1085 self.SimGeometryCFF=geoms[0]
1087 simGeometry=geoms[0]
1088 if self._options.gflash==
True:
1089 self.SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1091 self.SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1094 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1095 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1097 if self._options.scenario==
'nocoll' or self._options.scenario==
'cosmics':
1098 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1099 self._options.beamspot=
'NoSmear' 1102 if self._options.fast:
1103 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1104 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1105 self.RECOBEFMIXDefaultCFF =
'FastSimulation.Configuration.Reconstruction_BefMix_cff' 1106 self.RECOBEFMIXDefaultSeq =
'reconstruction_befmix' 1107 self.NANODefaultSeq =
'nanoSequenceFS' 1108 self.DQMOFFLINEDefaultCFF=
"FastSimulation.Configuration.DQMOfflineMC_cff" 1111 if self._options.pileup==
'default':
1112 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1113 self._options.pileup=MixingDefaultKey
1117 if self._options.isData:
1118 self._options.pileup=
None 1121 self.REDIGIDefaultSeq=self.DIGIDefaultSeq
1126 output = cms.OutputModule(
"PoolOutputModule")
1127 if stream.selectEvents.parameters_().__len__()!=0:
1128 output.SelectEvents = stream.selectEvents
1130 output.SelectEvents = cms.untracked.PSet()
1131 output.SelectEvents.SelectEvents=cms.vstring()
1132 if isinstance(stream.paths,tuple):
1133 for path
in stream.paths:
1134 output.SelectEvents.SelectEvents.append(path.label())
1136 output.SelectEvents.SelectEvents.append(stream.paths.label())
1140 if isinstance(stream.content,str):
1141 evtPset=getattr(self.process,stream.content)
1142 for p
in evtPset.parameters_():
1143 setattr(output,p,getattr(evtPset,p))
1144 if not self._options.inlineEventContent:
1147 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1149 output.outputCommands = stream.content
1152 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1154 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1155 filterName = cms.untracked.string(stream.name))
1157 if self._options.filtername:
1158 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1161 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1163 if workflow
in (
"producers,full"):
1164 if isinstance(stream.paths,tuple):
1165 for path
in stream.paths:
1166 self.schedule.append(path)
1168 self.schedule.append(stream.paths)
1172 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1173 self.additionalOutputs[name] = output
1174 setattr(self.process,name,output)
1176 if workflow ==
'output':
1178 filterList = output.SelectEvents.SelectEvents
1179 for i, filter
in enumerate(filterList):
1180 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1190 if ( len(sequence.split(
'.'))==1 ):
1192 elif ( len(sequence.split(
'.'))==2 ):
1194 sequence=sequence.split(
'.')[1]
1196 print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a" 1197 print sequence,
"not recognized" 1204 for i,s
in enumerate(seq.split(
'*')):
1206 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1208 p=getattr(self.process,prefix)
1209 p+=getattr(self.process, s)
1210 self.schedule.append(getattr(self.process,prefix))
1215 if self.nextScheduleIsConditional:
1216 self.conditionalPaths.append(prefix)
1217 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1218 self.schedule.append(getattr(self.process,prefix))
1220 for i,s
in enumerate(seq.split(
'+')):
1222 setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1223 self.schedule.append(getattr(self.process,sn))
1237 """ Enrich the process with alca streams """ 1239 sequence = sequence.split(
'.')[-1]
1242 alcaList = sequence.split(
"+")
1244 from Configuration.AlCa.autoAlca
import autoAlca
1248 for name
in alcaConfig.__dict__:
1249 alcastream = getattr(alcaConfig,name)
1250 shortName = name.replace(
'ALCARECOStream',
'')
1251 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1252 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1253 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1254 self.AlCaPaths.append(shortName)
1255 if 'DQM' in alcaList:
1256 if not self._options.inlineEventContent
and hasattr(self.process,name):
1257 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1259 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1262 if self._options.hltProcess
or 'HLT' in self.stepMap:
1263 if isinstance(alcastream.paths,tuple):
1264 for path
in alcastream.paths:
1269 for i
in range(alcaList.count(shortName)):
1270 alcaList.remove(shortName)
1273 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1274 path = getattr(alcaConfig,name)
1275 self.schedule.append(path)
1276 alcaList.remove(
'DQM')
1278 if isinstance(alcastream,cms.Path):
1280 self.blacklist_paths.append(alcastream)
1283 if len(alcaList) != 0:
1285 for name
in alcaConfig.__dict__:
1286 alcastream = getattr(alcaConfig,name)
1287 if isinstance(alcastream,cms.FilteredStream):
1288 available.append(name.replace(
'ALCARECOStream',
''))
1289 print "The following alcas could not be found "+
str(alcaList)
1290 print "available ",available
1292 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1297 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1298 print "Loading lhe fragment from",loadFragment
1299 __import__(loadFragment)
1300 self.process.load(loadFragment)
1302 self._options.inlineObjets+=
','+sequence
1304 getattr(self.process,sequence).nEvents =
int(self._options.number)
1307 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1308 self.excludedPaths.append(
"lhe_step")
1309 self.schedule.append( self.process.lhe_step )
1312 """ load the fragment of generator configuration """ 1317 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1319 if not '/' in loadFragment:
1320 loadFragment=
'Configuration.Generator.'+loadFragment
1322 loadFragment=loadFragment.replace(
'/',
'.')
1324 print "Loading generator fragment from",loadFragment
1325 __import__(loadFragment)
1329 if not (self._options.filein
or self._options.dasquery):
1330 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1333 generatorModule=sys.modules[loadFragment]
1334 genModules=generatorModule.__dict__
1337 if self.LHEDefaultSeq
in genModules:
1338 del genModules[self.LHEDefaultSeq]
1340 if self._options.hideGen:
1341 self.loadAndRemember(loadFragment)
1343 self.process.load(loadFragment)
1345 import FWCore.ParameterSet.Modules
as cmstypes
1346 for name
in genModules:
1347 theObject = getattr(generatorModule,name)
1348 if isinstance(theObject, cmstypes._Module):
1349 self._options.inlineObjets=name+
','+self._options.inlineObjets
1350 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1351 self._options.inlineObjets+=
','+name
1353 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1354 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1355 self.productionFilterSequence =
'ProductionFilterSequence' 1356 elif 'generator' in genModules:
1357 self.productionFilterSequence =
'generator' 1359 """ Enrich the schedule with the rest of the generation step """ 1360 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1361 genSeqName=sequence.split(
'.')[-1]
1365 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1366 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1367 self.loadAndRemember(cffToBeLoaded)
1369 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1371 if self._options.scenario ==
'HeavyIons':
1372 if self._options.pileup==
'HiMixGEN':
1373 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1375 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1377 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1378 self.schedule.append(self.process.generation_step)
1381 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1383 if 'reGEN' in self.stepMap:
1387 """ Enrich the schedule with the summary of the filter step """ 1389 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1390 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1394 """ Enrich the schedule with the simulation step""" 1395 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1396 if not self._options.fast:
1397 if self._options.gflash==
True:
1398 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1400 if self._options.magField==
'0T':
1401 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1403 if self._options.magField==
'0T':
1404 self.executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1406 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1410 """ Enrich the schedule with the digitisation step""" 1413 if self._options.gflash==
True:
1414 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1416 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1417 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1419 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1420 if self._options.inputEventContent==
'':
1421 self._options.inputEventContent=
'REGEN' 1423 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1430 """ Enrich the schedule with the crossing frame writer step""" 1436 """ Enrich the schedule with the digitisation step""" 1440 if self._options.pileup_input:
1442 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1443 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1444 elif self._options.pileup_input.startswith(
"filelist:"):
1445 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1447 theFiles=self._options.pileup_input.split(
',')
1449 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1464 """ Enrich the schedule with the L1 simulation step""" 1465 assert(sequence ==
None)
1471 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1472 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1473 if sequence
in supported:
1474 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1475 if self._options.scenario ==
'HeavyIons':
1476 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1477 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1479 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1484 """ Enrich the schedule with the HLT simulation step""" 1486 print "no specification of the hlt menu has been given, should never happen" 1487 raise Exception(
'no HLT sequence provided')
1491 from Configuration.HLT.autoHLT
import autoHLT
1494 sequence = autoHLT[key]
1496 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1502 if self._options.scenario ==
'HeavyIons':
1503 optionsForHLT[
'type'] =
'HIon' 1505 optionsForHLT[
'type'] =
'GRun' 1506 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1507 if sequence ==
'run,fromSource':
1508 if hasattr(self.process.source,
'firstRun'):
1509 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1510 elif hasattr(self.process.source,
'setRunNumber'):
1511 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1513 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1515 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1519 if self._options.isMC:
1520 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1522 if self._options.name !=
'HLT':
1523 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1524 self.additionalCommands.append(
'process = ProcessName(process)')
1525 self.additionalCommands.append(
'')
1526 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1529 self.schedule.append(self.process.HLTSchedule)
1530 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1533 if self._options.fast:
1534 if not hasattr(self.process,
'HLTEndSequence'):
1535 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1540 seqReco=sequence.split(
',')[1]
1541 seqDigi=sequence.split(
',')[0]
1543 print "RAW2RECO requires two specifications",sequence,
"insufficient" 1557 self.
loadAndRemember(
"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1559 for filt
in allMetFilterPaths:
1560 self.schedule.append(getattr(self.process,
'Flag_'+filt))
1563 ''' Enrich the schedule with L1 HW validation ''' 1566 print '\n\n\n DEPRECATED this has no action \n\n\n' 1570 ''' Enrich the schedule with L1 reconstruction ''' 1576 ''' Enrich the schedule with L1 reconstruction ''' 1582 ''' Enrich the schedule with a user defined filter sequence ''' 1584 filterConfig=self.load(sequence.split(
'.')[0])
1585 filterSeq=sequence.split(
'.')[-1]
1593 label=visitee.label()
1601 getattr(self.process,filterSeq).
visit( expander )
1602 self._options.inlineObjets+=
','+expander.inliner
1603 self._options.inlineObjets+=
','+filterSeq
1606 self.scheduleSequence(filterSeq,
'filtering_step')
1607 self.nextScheduleIsConditional=
True 1609 self.productionFilterSequence = filterSeq
1614 ''' Enrich the schedule with reconstruction ''' 1620 ''' Enrich the schedule with reconstruction ''' 1626 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1627 if not self._options.fast:
1628 print "ERROR: this step is only implemented for FastSim" 1631 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1635 ''' Enrich the schedule with PAT ''' 1638 self.labelsToAssociate.append(
'patTask')
1639 if not self._options.runUnscheduled:
1640 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1641 if self._options.isData:
1642 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1644 if self._options.fast:
1645 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1647 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1649 if self._options.hltProcess:
1650 if len(self._options.customise_commands) > 1:
1651 self._options.customise_commands = self._options.customise_commands +
" \n" 1652 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1653 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1654 self._options.customise_commands = self._options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1661 ''' Enrich the schedule with PATGEN ''' 1663 self.labelsToAssociate.append(
'patGENTask')
1664 if not self._options.runUnscheduled:
1665 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1666 if self._options.isData:
1667 raise Exception(
"PATGEN step can only run on MC")
1671 ''' Enrich the schedule with NANO ''' 1674 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1675 if self._options.runUnscheduled:
1676 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1678 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1679 if self._options.hltProcess:
1680 if len(self._options.customise_commands) > 1:
1681 self._options.customise_commands = self._options.customise_commands +
" \n" 1682 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1686 ''' Enrich the schedule with event interpretation ''' 1687 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1688 if sequence
in EventInterpretation:
1689 self.EIDefaultCFF = EventInterpretation[sequence]
1690 sequence =
'EIsequence' 1692 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1693 self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1694 self.scheduleSequence(sequence.split(
'.')[-1],
'eventinterpretaion_step')
1698 ''' Enrich the schedule with skimming fragments''' 1700 sequence = sequence.split(
'.')[-1]
1702 skimlist=sequence.split(
'+')
1704 from Configuration.Skimming.autoSkim
import autoSkim
1708 for skim
in skimConfig.__dict__:
1709 skimstream = getattr(skimConfig,skim)
1710 if isinstance(skimstream,cms.Path):
1712 self.blacklist_paths.append(skimstream)
1713 if (
not isinstance(skimstream,cms.FilteredStream)):
1715 shortname = skim.replace(
'SKIMStream',
'')
1716 if (sequence==
"all"):
1718 elif (shortname
in skimlist):
1721 if self._options.datatier==
'DQM':
1722 self.process.load(self.EVTCONTDefaultCFF)
1723 skimstreamDQM = cms.FilteredStream(
1724 responsible = skimstream.responsible,
1725 name = skimstream.name+
'DQM',
1726 paths = skimstream.paths,
1727 selectEvents = skimstream.selectEvents,
1728 content = self._options.datatier+
'EventContent',
1729 dataTier = cms.untracked.string(self._options.datatier)
1732 for i
in range(skimlist.count(shortname)):
1733 skimlist.remove(shortname)
1737 if (skimlist.__len__()!=0
and sequence!=
"all"):
1738 print 'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist)
1739 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
    def prepare_USER(self, sequence=None):
        ''' Enrich the schedule with a user defined sequence '''
        # ...

    def prepare_POSTRECO(self, sequence=None):
        """ Enrich the schedule with the postreco step """
        # ...

    def prepare_VALIDATION(self, sequence='validation'):
        print sequence, "in preparing validation"
        from Validation.Configuration.autoValidation import autoValidation
        # ...
        sequence = sequence.split('.')[-1]
        if sequence.find(',') != -1:
            # explicit "prevalidation,validation" specification
            prevalSeqName = sequence.split(',')[0].split('+')
            valSeqName = sequence.split(',')[1].split('+')
            # ...
        elif '@' in sequence:
            # alias specification: both lists start from the same '+'-separated string
            prevalSeqName = sequence.split('+')
            valSeqName = sequence.split('+')
            # ...
        else:
            # ...
            # plain sequence name: derive the standard prevalidation/validation names
            postfix = '_'+sequence
            prevalSeqName = ['prevalidation'+postfix]
            valSeqName = ['validation'+postfix]
            if not hasattr(self.process, valSeqName[0]):
                # ...
                valSeqName = [sequence]
        # ...
        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                # ...
        for (i,s) in enumerate(prevalSeqName):
            # ...
            setattr(self.process, 'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process, 'prevalidation_step%s'%NFI(i)))
        for (i,s) in enumerate(valSeqName):
            setattr(self.process, 'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process, 'validation_step%s'%NFI(i)))
        # ...
        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
            # ...
        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds = True
        if not 'DIGI' in self.stepMap and not self._options.fast:
            # ...
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
        if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # put the generator step filter in front of each validation step
            for (i,s) in enumerate(valSeqName):
                getattr(self.process, 'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process, 'validation_step%s'%NFI(i))._seq
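What prepare_VALIDATION ultimately does for each sequence is the same two-line pattern used throughout ConfigBuilder: attach a cms.Path or cms.EndPath to the process under a generated name and append it to the schedule list. A toy version of that pattern, assuming a CMSSW environment where FWCore is importable ('DemoAnalyzer' is a placeholder module type):

    import FWCore.ParameterSet.Config as cms

    process = cms.Process("TEST")
    process.demo = cms.EDAnalyzer("DemoAnalyzer")             # placeholder module
    process.validation = cms.Sequence(process.demo)

    schedule = []
    setattr(process, 'validation_step', cms.EndPath(getattr(process, 'validation')))
    schedule.append(getattr(process, 'validation_step'))      # later wrapped into cms.Schedule(...)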
1826 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1827 It will climb down within PSets, VPSets and VInputTags to find its target""" 1828 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1835 if isinstance(pset, cms._Parameterizable):
1836 for name
in pset.parameters_().
keys():
1842 value = getattr(pset,name)
1843 type = value.pythonTypeName()
1844 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1845 self.
doIt(value,base+
"."+name)
1846 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1847 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1848 elif type
in (
'cms.string',
'cms.untracked.string'):
1850 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1852 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1853 for (i,n)
in enumerate(value):
1854 if not isinstance(n, cms.InputTag):
1858 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1861 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1862 for (i,n)
in enumerate(value):
1865 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1867 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1868 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1873 label = visitee.label()
1874 except AttributeError:
1875 label =
'<Module not in a Process>' 1877 label =
'other execption' 1878 self.
doIt(visitee, label)
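The visitor above relies on the standard cms sequence visitation protocol: any object providing enter()/leave() methods can be handed to a Path's visit() method, which is exactly what the generated 'process.<seq>.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(...))' command depends on. A minimal visitor of the same shape, again assuming a CMSSW python environment ('AProducer'/'BAnalyzer' are placeholder module types):

    import FWCore.ParameterSet.Config as cms

    class LabelPrinter(object):
        """Tiny visitor: enter()/leave() are called for every node of the visited sequence."""
        def enter(self, visitee):
            if hasattr(visitee, 'label_'):
                print(visitee.label_())
        def leave(self, visitee):
            pass

    process = cms.Process("DEMO")
    process.a = cms.EDProducer("AProducer")
    process.b = cms.EDAnalyzer("BAnalyzer")
    process.p = cms.Path(process.a + process.b)
    process.p.visit(LabelPrinter())             # prints the labels 'a' and 'b'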
1885 print "Replacing all InputTag %s => %s"%(oldT,newT)
1888 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1889 if not loadMe
in self.additionalCommands:
1890 self.additionalCommands.append(loadMe)
1891 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1895 if self._options.hltProcess:
1896 proc=self._options.hltProcess
1898 proc=self.process.name_()
1899 if proc==HLTprocess:
return 1901 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1903 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1904 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1905 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self, seqList, mapping, index=None):
        # expand '@alias' entries of seqList in place; level and maxLevel (initialised
        # a few lines earlier, not shown here) bound the number of expansion rounds
        # ...
        while '@' in repr(seqList) and level < maxLevel:
            # ...
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location = specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo = mapping[location]
                    if index is not None:
                        mappedTo = mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break
        if level == maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
    def prepare_DQM(self, sequence='DQMOffline'):
        sequenceList = sequence.split('.')[-1].split('+')
        postSequenceList = sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # ...
        if len(set(sequenceList)) != len(sequenceList):
            sequenceList = list(set(sequenceList))
            print "Duplicate entries for DQM:, using", sequenceList

        pathName = 'dqmoffline_step'
        for (i, sequence) in enumerate(sequenceList):
            if i != 0:
                pathName = 'dqmoffline_%d_step'%(i)
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # ...
            setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence) ))
            self.schedule.append(getattr(self.process, pathName))
            if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                # put the generator step filter in front of the DQM path
                getattr(self.process, pathName).insert(0, self.process.genstepfilter)

        pathName = 'dqmofflineOnPAT_step'
        for (i, sequence) in enumerate(postSequenceList):
            if i != 0:
                pathName = 'dqmofflineOnPAT_%d_step'%(i)
            setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence) ))
            self.schedule.append(getattr(self.process, pathName))
1965 """ Enrich the process with harvesting step """ 1966 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1970 sequence = sequence.split(
'.')[-1]
1973 harvestingList = sequence.split(
"+")
1974 from DQMOffline.Configuration.autoDQM
import autoDQM
1975 from Validation.Configuration.autoValidation
import autoValidation
1977 combined_mapping = copy.deepcopy( autoDQM )
1978 combined_mapping.update( autoValidation )
1979 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1981 if len(set(harvestingList))!=len(harvestingList):
1982 harvestingList=
list(set(harvestingList))
1983 print "Duplicate entries for HARVESTING, using",harvestingList
1985 for name
in harvestingList:
1986 if not name
in harvestingConfig.__dict__:
1987 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1989 harvestingstream = getattr(harvestingConfig,name)
1990 if isinstance(harvestingstream,cms.Path):
1991 self.schedule.append(harvestingstream)
1992 self.blacklist_paths.append(harvestingstream)
1993 if isinstance(harvestingstream,cms.Sequence):
1994 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1995 self.schedule.append(getattr(self.process,name+
"_step"))
2001 """ Enrich the process with AlCaHarvesting step """ 2003 sequence=sequence.split(
".")[-1]
2006 harvestingList = sequence.split(
"+")
2010 from Configuration.AlCa.autoPCL
import autoPCL
2013 for name
in harvestingConfig.__dict__:
2014 harvestingstream = getattr(harvestingConfig,name)
2015 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2016 self.schedule.append(harvestingstream)
2017 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2018 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2019 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2020 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2022 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2023 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2024 harvestingList.remove(name)
2026 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2027 self.schedule.append(lastStep)
2029 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2030 print "The following harvesting could not be found : ", harvestingList
2031 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)

    def build_production_info(self, evt_type, evtnumber):
        """ Add useful info for the production. """
        self.process.configurationMetadata = cms.untracked.PSet(
            version    = cms.untracked.string("$Revision: 1.19 $"),
            name       = cms.untracked.string("Applications"),
            annotation = cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
            )
        # ...
        self.addedObjects.append(("Production Info", "configurationMetadata"))
        # build the header of the generated configuration and create the cms.Process,
        # applying the era and process modifiers collected from the command line
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        modifierStrings = []
        modifiers = []
        modifierImports = ['from Configuration.StandardSequences.Eras import eras']
        if hasattr(self._options, "era") and self._options.era:
            # ...
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(","):
                modifierStrings.append("eras."+requestedEra)
                modifiers.append(getattr(eras, requestedEra))

        if hasattr(self._options, "procModifiers") and self._options.procModifiers:
            # ...
            for pm in self._options.procModifiers.split(','):
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'), pm))

        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'"
        if len(modifierStrings) > 0:
            self.pythonCfgCode += ','+','.join(modifierStrings)
        # ...
        if self.process == None:
            if len(modifiers) > 0:
                self.process = cms.Process(self._options.name, *modifiers)
            else:
                self.process = cms.Process(self._options.name)
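For orientation, the code-generation lines above are what produce the familiar preamble of a cmsDriver configuration file. Roughly, for a hypothetical job named 'RECO' run with '--era Run2_2018', the emitted text begins like this (contents illustrative, not taken from a real job):

    # using:
    # Revision: 1.19
    # Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
    # with command line options: RECO --era Run2_2018 ...
    import FWCore.ParameterSet.Config as cms

    from Configuration.StandardSequences.Eras import eras

    process = cms.Process('RECO',eras.Run2_2018)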
2103 """ Prepare the configuration string and add missing pieces.""" 2115 outputModuleCfgCode=
"" 2116 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2121 self.
pythonCfgCode +=
"# import of standard configurations\n" 2122 for module
in self.imports:
2126 if not hasattr(self.process,
"configurationMetadata"):
2130 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2133 for comment,object
in self.addedObjects:
2145 nl=sorted(self.additionalOutputs.keys())
2147 output = self.additionalOutputs[name]
2148 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2149 tmpOut = cms.EndPath(output)
2150 setattr(self.process,name+
'OutPath',tmpOut)
2151 self.schedule.append(tmpOut)
        for command in self.additionalCommands:
            # ...
        for object in self._options.inlineObjets.split(','):
            # ...
            if not hasattr(self.process, object):
                print 'cannot inline -'+object+'- : not known'
            # ...

        # dump all paths and endpaths that were not blacklisted (e.g. by skims or harvesting)
        for path in self.process.paths:
            if getattr(self.process, path) not in self.blacklist_paths:
                # ...
        for endpath in self.process.endpaths:
            if getattr(self.process, endpath) not in self.blacklist_paths:
                # ...
        # build the schedule, both in memory and as python code for the dumped config
        result = "process.schedule = cms.Schedule("

        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process, "HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
            pathNames = ['process.'+p.label_() for p in beforeHLT]
            result += ','.join(pathNames)+')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.'+p.label_() for p in afterHLT]
            result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
        else:
            pathNames = ['process.'+p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
        # ...
        for labelToAssociate in self.labelsToAssociate:
            self.process.schedule.associate(getattr(self.process, labelToAssociate))
            self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
        # ...
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
        # ...
        if self._options.nThreads is not "1":   # note: identity comparison with a string literal; '!=' is what is meant
            # ...
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(0)\n"

        if self._options.isRepacked:
            # ...
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            # ...
            MassReplaceInputTag(self.process)
        # put the filtering path in the schedule: prepend the production filter sequence
        # to every conditional, non-excluded path, both in the dumped python code and
        # in the in-memory process
        if self.productionFilterSequence:
            self.pythonCfgCode += '# filter all path with the production filter sequence\n'
            # ...
            if len(self.conditionalPaths):
                self.pythonCfgCode += '\tif not path in %s: continue\n'%str(self.conditionalPaths)
            if len(self.excludedPaths):
                # ...
            self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
            pfs = getattr(self.process, self.productionFilterSequence)
            for path in self.process.paths:
                if not path in self.conditionalPaths: continue
                if path in self.excludedPaths: continue
                getattr(self.process, path)._seq = pfs * getattr(self.process, path)._seq
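In the dumped python file, the strings appended above come out as a small loop that prepends the production filter sequence to each selected path. Schematically it looks like the snippet below; the enclosing 'for path in process.paths:' line is generated by code elided from this dump, and the sequence and path names are invented:

    # filter all path with the production filter sequence
    for path in process.paths:
        if not path in ['generation_step']: continue
        getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq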
        # apply the same late customisations to the dumped code and to the live process
        if self._options.runUnscheduled:
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            # ...

        if hasattr(self.process, "logErrorHarvester"):
            # ...
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            # ...

        # Add early deletion of temporary data products to reduce peak memory need
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        # ...
        # optionally write a small .io summary file (JSON) describing inputs and outputs
        if self._options.io:
            ioJson = {}
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io = open(self._options.io, 'w')
            if hasattr(self.process.source, "fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source, "secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o, om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter'] = self.productionFilterSequence
            # ...
            io.write(json.dumps(ioJson))
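The resulting .io file is a single JSON dictionary summarising the job's inputs and outputs. Its content might look like the sketch below (all values invented; the truncated store path is purely a placeholder):

    ioJson = {
        'primary': ['/store/data/.../file.root'],   # input file names from process.source
        'GT': 'auto:run2_data',                     # GlobalTag
        'RECOoutput': 'output.root',                # one entry per output module
    }
    # json.dumps(ioJson) then yields one line such as:
    # {"primary": ["/store/data/.../file.root"], "GT": "auto:run2_data", "RECOoutput": "output.root"}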