__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen, PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey, VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'

# ...
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"

# ...
    import FWCore.ParameterSet.Config as cms
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries = line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry = line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    # ...
        if not hasattr(s,"fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        # ...
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    # ...
        raise Exception("There are not files in input from the file list")
    # ...
        print "found parent files:",sec
# ...
    import FWCore.ParameterSet.Config as cms
    # ...
    print "the query is",query
    # ...
    while eC!=0 and count<3:
        # ...
            print 'Sleeping, then retrying DAS'
        p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE, shell=True)
        # ...
        tupleP = os.waitpid(p.pid, 0)
        # ...
    # ...
        print "DAS succeeded after",count,"attempts",eC
    # ...
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries = line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry = line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    # ...
        if not hasattr(s,"fileNames"):
            s.fileNames = cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        # ...
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames = cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    # ...
        print "found parent files:",sec

def anyOf(listOfKeys,dict,opt=None):
    # ...
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
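        # Sketch of the resulting mapping for a hypothetical --step value
        # "GEN,SIM,RECO:localreco+globalreco,HLT:A:B":
        #   stepMap['GEN']  = ''                           (bare step)
        #   stepMap['RECO'] = ['localreco','globalreco']   (two-part spec, '+'-separated)
        #   stepMap['HLT']  = (['B'],'A')                  (three-part spec, as stored above)
        # and stepKeys keeps the steps in the order they were given.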
        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.schedule = list()
        # ...
        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}
        self.productionFilterSequence = None
        self.labelsToAssociate = []
        self.nextScheduleIsConditional = False
        self.conditionalPaths = []
        self.excludedPaths = []

    # ...
        """Function to add the igprof profile service so that you can dump in the middle"""
        profileOpts = self._options.profile.split(':')
        # ...
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None
        # ...
            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)
        # ...
            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)
        # ...
            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       # ...
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
    # ...
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    # ...
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

    # ...
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
    # ...
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            # ...
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

    # ...
        """Here we decide how many evts will be processed"""
        self.process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source = cms.Source("PoolSource",
                                                 fileNames = cms.untracked.vstring(),
                                                 secondaryFileNames = cms.untracked.vstring())
                # ...
            elif self._options.filetype == "DAT":
                self.process.source = cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
                # ...
            elif self._options.filetype == "LHE":
                self.process.source = cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    # ...
                    args = self._options.filein.split(':')
                    # ...
                    print 'LHE input from article ',article
                    location = '/store/lhe/'
                    # ...
                    textOfFiles = os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    # ...
                        print 'Issue to load LHE files, please check and try again.'
                    # ...
                    if len(self.process.source.fileNames)==0:
                        print 'Issue with empty filename, but can pass line check'
                    # ...
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                # ...
            elif self._options.filetype == "DQM":
                self.process.source = cms.Source("DQMRootSource",
                                                 fileNames = cms.untracked.vstring())
                # ...
            elif self._options.filetype == "DQMDAQ":
                # ...
                self.process.source = cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands += ',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands = 'keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options,"evt_type")):
            if self.process.source is None:
                self.process.source = cms.Source("EmptySource")

        # modify source in case of run-dependent MC
        self.runsAndWeights = None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            # ...
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
498 """ Add output module to the process """ 500 if self._options.outputDefinition:
501 if self._options.datatier:
502 print "--datatier & --eventcontent options ignored" 505 outList = eval(self._options.outputDefinition)
506 for (id,outDefDict)
in enumerate(outList):
507 outDefDictStr=outDefDict.__str__()
508 if not isinstance(outDefDict,dict):
509 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
511 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
514 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
515 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
516 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
517 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
518 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
520 if not theModuleLabel:
521 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 525 for name
in tryNames:
526 if not hasattr(self.process,name):
529 if not theModuleLabel:
530 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
                defaultFileName = self._options.outfile_name
                # ...
                    defaultFileName = self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName = self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    # ...

                if len(outDefDict.keys()):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))

                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")

                # ...
                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName = 'StreamALCACombined'

                CppType = 'PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType = 'TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType = 'DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName))
                                          )
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                # ...
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                # ...
                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents = cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)

                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")

                setattr(self.process,theModuleLabel,output)
                outputModule = getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path = getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    # ...
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra output command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result += "\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
        # ...
        streamTypes = self._options.eventcontent.split(',')
        tiers = self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # ...
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            # ...

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")
            # ...
                theFileName = self._options.outfile_name
                theFilterName = self._options.filtername
            # ...
                theFileName = self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName = self._options.filtername
            CppType = 'PoolOutputModule'
            if self._options.timeoutOutput:
                CppType = 'TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType = 'DQMRootOutputModule'
            if "NANOAOD" in streamType and 'NANOAOD' in tier : CppType = 'NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      # ...
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName))
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                # ...

            outputModuleName = streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule = getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path = getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                # ...
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result += "\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
    # ...
        """Add selected standard sequences to the process"""
        # ...
        if self._options.pileup:
            pileupSpec = self._options.pileup.split(',')[0]

            # ...
            from Configuration.StandardSequences.Mixing import Mixing, defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                # ...

            if '.' in pileupSpec:
                mixingDict = {'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict = {'file':pileupSpec[5:]}
            else:
                mixingDict = copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                # the mixing configuration is a local file: load and inline it
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets += ',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F'] = filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F'] = (filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F'] = self._options.pileup_input.split(',')
                # ...
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
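            # Hypothetical --pileup value exercising the update() above:
            #     "AVE_35_BX_25ns,{'N':20}"
            # the token before the first comma picks the scenario from Mixing, the
            # eval()'d remainder overrides individual entries of mixingDict, and keys
            # still present once the mixing commands have been derived end up in the
            # 'unused mixing specification' exception.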
        # ...
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                self.loadAndRemember(self.SimGeometryCFF)
                if self.geometryDBLabel:
                    self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
        # ...
            print "Geometry option",self._options.geometry,"unknown."

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                # ...
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent = '%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName = stepName[2:]
            # ...
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            # ...
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands += 'keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands = 'keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):
                # ...

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

    # ...
        """Add conditions to the process"""
        if not self._options.conditions: return

        if 'FrontierConditions_GlobalTag' in self._options.conditions:
            print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
            self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')

        self.loadAndRemember(self.ConditionsDefaultCFF)
        from Configuration.AlCa.GlobalTag import GlobalTag
        self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
        self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
        self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
812 """Include the customise code """ 816 for c
in self._options.customisation_file:
817 custOpt.extend(c.split(
","))
819 for c
in self._options.customisation_file_unsch:
820 custOpt.extend(c.split(
","))
826 raise Exception(
"more than . in the specification:"+opt)
827 fileName=opt.split(
'.')[0]
828 if opt.count(
'.')==0: rest=
'customise' 830 rest=opt.split(
'.')[1]
831 if rest==
'py': rest=
'customise' 833 if fileName
in custMap:
834 custMap[fileName].extend(rest.split(
'+'))
836 custMap[fileName]=rest.split(
'+')
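        # Parsing sketch for a hypothetical --customise entry
        # "SLHCUpgradeSimulations/Configuration/myCustoms.customisePhase2": fileName
        # becomes "SLHCUpgradeSimulations/Configuration/myCustoms" and rest
        # "customisePhase2"; a bare "myCustoms.py" falls back to the default function
        # name 'customise', and repeated files accumulate their '+'-separated functions
        # in custMap[fileName].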
        final_snippet = '\n# customisation of the process.\n'
        # ...
            allFcn.extend(custMap[opt])
        # ...
            if allFcn.count(fcn)!=1:
                raise Exception("cannot specify twice "+fcn+" as a customisation method")

        # ...
            packageName = f.replace(".py","").replace("/",".")
            __import__(packageName)
            package = sys.modules[packageName]

            # point to the .py source rather than the compiled .pyc
            customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

            final_snippet += '\n# Automatic addition of the customisation function from '+packageName+'\n'
            if self._options.inline_custom:
                for line in file(customiseFile,'r'):
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
            else:
                final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
            for fcn in custMap[f]:
                print "customising the process with",fcn,"from",f
                if not hasattr(package,fcn):
                    # ...
                    raise Exception("config "+f+" has no function "+fcn)
                # ...
                self.process = getattr(package,fcn)(self.process)
                # ...
                final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
                final_snippet += "\nprocess = %s(process)\n"%(fcn,)

        final_snippet += '\n# End of customisation functions\n'

    # ...
        final_snippet = '\n# Customisation from command line\n'
        if self._options.customise_commands:
            # ...
            for com in self._options.customise_commands.split('\\n'):
                com = string.lstrip(com)
                # ...
                final_snippet += '\n'+com
    # ...
        if len(self.stepMap):
            # ...
        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
        # ...
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        # ...
        self.EIDefaultCFF = None
        self.SKIMDefaultCFF = "Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF = "Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF = "Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF = "Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF = "Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF = "Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF = "Configuration/StandardSequences/DigiDMPreMix_cff"
                # ...
                self.L1EMDefaultCFF = 'Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF = "Configuration/StandardSequences/Digi_PreMix_cff"
            # ...
            self.L1EMDefaultCFF = "Configuration/StandardSequences/SimL1EmulatorPreMix_cff"

        self.ALCADefaultSeq = None
        self.LHEDefaultSeq = 'externalLHEProducer'
        self.GENDefaultSeq = 'pgen'
        self.SIMDefaultSeq = 'psim'
        self.DIGIDefaultSeq = 'pdigi'
        self.DIGIPREMIXDefaultSeq = 'pdigi'
        self.DIGIPREMIX_S2DefaultSeq = 'pdigi'
        self.DATAMIXDefaultSeq = None
        self.DIGI2RAWDefaultSeq = 'DigiToRaw'
        self.HLTDefaultSeq = 'GRun'
        self.L1DefaultSeq = None
        self.L1REPACKDefaultSeq = 'GT'
        self.HARVESTINGDefaultSeq = None
        self.ALCAHARVESTDefaultSeq = None
        self.CFWRITERDefaultSeq = None
        self.RAW2DIGIDefaultSeq = 'RawToDigi'
        self.L1RecoDefaultSeq = 'L1Reco'
        self.L1TrackTriggerDefaultSeq = 'L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq = 'reconstruction'
        else:
            self.RECODefaultSeq = 'reconstruction_fromRECO'
        self.RECOSIMDefaultSeq = 'recosim'
        self.EIDefaultSeq = 'top'
        self.POSTRECODefaultSeq = None
        self.L1HwValDefaultSeq = 'L1HwVal'
        self.DQMDefaultSeq = 'DQMOffline'
        self.VALIDATIONDefaultSeq = ''
        self.ENDJOBDefaultSeq = 'endOfProcess'
        self.REPACKDefaultSeq = 'DigiToRawRepack'
        self.PATDefaultSeq = 'miniAOD'
        self.PATGENDefaultSeq = 'miniGEN'
        self.NANODefaultSeq = 'nanoSequence'

        self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot = VtxSmearedDefaultKey
        if self._options.isMC==True:
            self.RECODefaultCFF = "Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF = "Configuration/StandardSequences/PATMC_cff"
            self.PATGENDefaultCFF = "Configuration/StandardSequences/PATGEN_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self.NANODefaultSeq = 'nanoSequenceMC'
        else:
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq = 'fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup = 'Cosmics'
            self.DIGIDefaultCFF = "Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq = 'reconstructionCosmics'
            self.DQMDefaultSeq = 'DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot = VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF = "Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq = ''
            self.EVTCONTDefaultCFF = "Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq = 'reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq = 'DQMOfflineHeavyIons'
            self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF = "Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF = "DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq = ','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq = 'user'
        self.USERDefaultCFF = None
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField = 'AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        self.GeometryCFF = 'Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel = None
        # ...
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF = 'FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF = 'FastSimulation/Configuration/Geometries_MC_cff'

        # ...
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]
            # ...

        geoms = self._options.geometry.split(',')
        # ...
            if '/' in geoms[1] or '_cff' in geoms[1]:
                self.GeometryCFF = geoms[1]
            else:
                self.GeometryCFF = 'Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            self.SimGeometryCFF = 'Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel = geoms[0][3:]
            # ...
        else:
            if '/' in geoms[0] or '_cff' in geoms[0]:
                self.SimGeometryCFF = geoms[0]
            else:
                simGeometry = geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF = 'Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                else:
                    self.SimGeometryCFF = 'Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # ...
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF = "Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF = "Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot = 'NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF = 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF = "FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup = MixingDefaultKey

        # ...
        if self._options.isData:
            self._options.pileup = None

        self.REDIGIDefaultSeq = self.DIGIDefaultSeq
    # ...
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents = cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset = getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                # ...
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName = cms.untracked.string(self._options.filtername+"_"+stream.name)

        # ...
        output.eventAutoFlushCompressedSize = cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            # ...
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess
    # ...
        if ( len(sequence.split('.'))==1 ):
            # ...
        elif ( len(sequence.split('.'))==2 ):
            # ...
            sequence = sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"
            # ...

    # ...
            for i,s in enumerate(seq.split('*')):
                # ...
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                # ...
                    p = getattr(self.process,prefix)
                    p += getattr(self.process, s)
            self.schedule.append(getattr(self.process,prefix))
        # ...
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            # ...
                for i,s in enumerate(seq.split('+')):
                    # ...
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
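        # Reading of the branches above: a '*'-separated spec builds a single Path (or
        # EndPath) named prefix by '+='-ing each piece into it, a plain name wraps that
        # one sequence (recording it in conditionalPaths when nextScheduleIsConditional
        # is set), and a '+'-separated spec creates one schedule entry per piece.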
1237 """ Enrich the process with alca streams """ 1239 sequence = sequence.split(
'.')[-1]
1242 alcaList = sequence.split(
"+")
1244 from Configuration.AlCa.autoAlca
import autoAlca
1248 for name
in alcaConfig.__dict__:
1249 alcastream = getattr(alcaConfig,name)
1250 shortName = name.replace(
'ALCARECOStream',
'')
1251 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1252 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1253 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1254 self.AlCaPaths.append(shortName)
1255 if 'DQM' in alcaList:
1256 if not self._options.inlineEventContent
and hasattr(self.process,name):
1257 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1259 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1262 if self._options.hltProcess
or 'HLT' in self.stepMap:
1263 if isinstance(alcastream.paths,tuple):
1264 for path
in alcastream.paths:
1269 for i
in range(alcaList.count(shortName)):
1270 alcaList.remove(shortName)
1273 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1274 path = getattr(alcaConfig,name)
1275 self.schedule.append(path)
1276 alcaList.remove(
'DQM')
1278 if isinstance(alcastream,cms.Path):
1280 self.blacklist_paths.append(alcastream)
1283 if len(alcaList) != 0:
1285 for name
in alcaConfig.__dict__:
1286 alcastream = getattr(alcaConfig,name)
1287 if isinstance(alcastream,cms.FilteredStream):
1288 available.append(name.replace(
'ALCARECOStream',
''))
1289 print "The following alcas could not be found "+
str(alcaList)
1290 print "available ",available
1292 raise Exception(
"The following alcas could not be found "+
str(alcaList))
    # ...
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)
        # ...
        self._options.inlineObjets += ','+sequence
        # ...
        getattr(self.process,sequence).nEvents = int(self._options.number)

        # ...
        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1312 """ load the fragment of generator configuration """ 1317 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1319 if not '/' in loadFragment:
1320 loadFragment=
'Configuration.Generator.'+loadFragment
1322 loadFragment=loadFragment.replace(
'/',
'.')
1324 print "Loading generator fragment from",loadFragment
1325 __import__(loadFragment)
1329 if not (self._options.filein
or self._options.dasquery):
1330 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1333 generatorModule=sys.modules[loadFragment]
1334 genModules=generatorModule.__dict__
1337 if self.LHEDefaultSeq
in genModules:
1338 del genModules[self.LHEDefaultSeq]
1340 if self._options.hideGen:
1341 self.loadAndRemember(loadFragment)
1343 self.process.load(loadFragment)
1345 import FWCore.ParameterSet.Modules
as cmstypes
1346 for name
in genModules:
1347 theObject = getattr(generatorModule,name)
1348 if isinstance(theObject, cmstypes._Module):
1349 self._options.inlineObjets=name+
','+self._options.inlineObjets
1350 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1351 self._options.inlineObjets+=
','+name
1353 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1354 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1355 self.productionFilterSequence =
'ProductionFilterSequence' 1356 elif 'generator' in genModules:
1357 self.productionFilterSequence =
'generator' 1359 """ Enrich the schedule with the rest of the generation step """ 1360 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1361 genSeqName=sequence.split(
'.')[-1]
1365 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1366 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1367 self.loadAndRemember(cffToBeLoaded)
1369 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1371 if self._options.scenario ==
'HeavyIons':
1372 if self._options.pileup==
'HiMixGEN':
1373 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1375 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1377 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1378 self.schedule.append(self.process.generation_step)
1381 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1383 if 'reGEN' in self.stepMap:
1387 """ Enrich the schedule with the summary of the filter step """ 1389 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1390 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1394 """ Enrich the schedule with the simulation step""" 1395 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1396 if not self._options.fast:
1397 if self._options.gflash==
True:
1398 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1400 if self._options.magField==
'0T':
1401 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1403 if self._options.magField==
'0T':
1404 self.executeAndRemember(
"process.famosSimHits.UseMagneticField = cms.bool(False)")
1406 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1410 """ Enrich the schedule with the digitisation step""" 1413 if self._options.gflash==
True:
1414 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1416 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1417 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1419 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1420 if self._options.inputEventContent==
'':
1421 self._options.inputEventContent=
'REGEN' 1423 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1430 """ Enrich the schedule with the digitisation step""" 1435 if sequence ==
'pdigi_valid':
1436 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1438 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1444 """ Enrich the schedule with the digitisation step""" 1445 self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1447 self.loadAndRemember(
"SimGeneral/MixingModule/digi_MixPreMix_cfi")
1450 if sequence ==
'pdigi_valid':
1451 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1453 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1455 self.scheduleSequence(sequence.split(
'.')[-1],
'digitisation_step')
1459 """ Enrich the schedule with the crossing frame writer step""" 1465 """ Enrich the schedule with the digitisation step""" 1469 if self._options.pileup_input:
1471 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1472 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1473 elif self._options.pileup_input.startswith(
"filelist:"):
1474 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1476 theFiles=self._options.pileup_input.split(
',')
1478 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1485 if "DIGIPREMIX" in self.stepMap.keys():
1487 self.
executeAndRemember(
"process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
1497 """ Enrich the schedule with the L1 simulation step""" 1498 assert(sequence ==
None)
1504 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1505 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT']
1506 if sequence
in supported:
1507 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1508 if self._options.scenario ==
'HeavyIons':
1509 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1510 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1512 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1517 """ Enrich the schedule with the HLT simulation step""" 1519 print "no specification of the hlt menu has been given, should never happen" 1520 raise Exception(
'no HLT sequence provided')
1524 from Configuration.HLT.autoHLT
import autoHLT
1527 sequence = autoHLT[key]
1529 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1535 if self._options.scenario ==
'HeavyIons':
1536 optionsForHLT[
'type'] =
'HIon' 1538 optionsForHLT[
'type'] =
'GRun' 1539 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1540 if sequence ==
'run,fromSource':
1541 if hasattr(self.process.source,
'firstRun'):
1542 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1543 elif hasattr(self.process.source,
'setRunNumber'):
1544 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1546 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1548 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName
            # ...

        self.schedule.append(self.process.HLTSchedule)
        [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]

        # ...
        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")

    # ...
        # ...
            seqReco = sequence.split(',')[1]
            seqDigi = sequence.split(',')[0]
        # ...
            print "RAW2RECO requires two specifications",sequence,"insufficient"

    # ...
        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        # ...
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
    # ...
        ''' Enrich the schedule with L1 HW validation '''
        # ...
        print '\n\n\n DEPRECATED this has no action \n\n\n'
        # ...

    # ...
        ''' Enrich the schedule with L1 reconstruction '''
        # ...

    # ...
        ''' Enrich the schedule with L1 reconstruction '''
        # ...

    # ...
        ''' Enrich the schedule with a user defined filter sequence '''
        # ...
        filterConfig = self.load(sequence.split('.')[0])
        filterSeq = sequence.split('.')[-1]
        # ...
                label = visitee.label()
                # ...

        # ...
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets += ','+expander.inliner
        self._options.inlineObjets += ','+filterSeq

        # ...
        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional = True
        # ...
        self.productionFilterSequence = filterSeq
        # ...

    # ...
        ''' Enrich the schedule with reconstruction '''
        # ...

    # ...
        ''' Enrich the schedule with reconstruction '''
        # ...

    # ...
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
            # ...
        # ...
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
    # ...
        ''' Enrich the schedule with PAT '''
        # ...
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
        # ...

    # ...
        ''' Enrich the schedule with PATGEN '''
        # ...
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")
        # ...

    # ...
        ''' Enrich the schedule with NANO '''
        # ...
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        else:
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

    # ...
        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
        else:
            raise Exception('Cannot set %s event interpretation'%( sequence) )
        self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
        self.scheduleSequence(sequence.split('.')[-1],'eventinterpretaion_step')
    def prepare_SKIM(self, sequence="all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig = self.loadDefaultOrSpecifiedCFF(sequence, self.SKIMDefaultCFF)
        sequence = sequence.split('.')[-1]

        skimlist = sequence.split('+')
        # support aliases like @Mu via the autoSkim mapping
        from Configuration.Skimming.autoSkim import autoSkim
        self.expandMapping(skimlist, autoSkim)

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig, skim)
            if isinstance(skimstream, cms.Path):
                # blacklist the path so that it does not appear in the cfg twice
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream, cms.FilteredStream)):
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(shortname, skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(shortname, skimstream)
                # add a DQM event content for this stream if requested
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                    self.addExtraStream(shortname+'DQM', skimstreamDQM)
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
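# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# The selection loop above, replayed with plain Python objects: a fake skim
# configuration exposes attributes named "<X>SKIMStream", the short name is the
# attribute name without that suffix, and only streams whose short name is in
# the requested list are kept.  FakeStream, FakeSkimConfig and the stream names
# are invented for the example.
class FakeStream(object):
    def __init__(self, name):
        self.name = name

class FakeSkimConfig(object):
    MuTauSKIMStream = FakeStream('MuTau')
    JetHTSKIMStream = FakeStream('JetHT')

def select_skims(config, skimlist):
    selected = []
    for attr in config.__dict__:
        if not attr.endswith('SKIMStream'):
            continue
        shortname = attr.replace('SKIMStream', '')
        if shortname in skimlist:
            selected.append(getattr(config, attr))
            # drop every occurrence so that leftovers can be flagged as typos
            while shortname in skimlist:
                skimlist.remove(shortname)
    return selected, skimlist

streams, leftovers = select_skims(FakeSkimConfig, ['MuTau', 'NotASkim'])
print([s.name for s in streams])   # ['MuTau']
print(leftovers)                   # ['NotASkim'] -> would trigger the typo exception above
# -----------------------------------------------------------------------------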
    def prepare_USER(self, sequence=None):
        ''' Enrich the schedule with a user defined sequence '''
        # ...

    def prepare_POSTRECO(self, sequence=None):
        """ Enrich the schedule with the postreco step """
        # ...

    def prepare_VALIDATION(self, sequence='validation'):
        print sequence,"in preparing validation"
        # ...
        from Validation.Configuration.autoValidation import autoValidation
        # in case of VALIDATION:something,somethingelse -> pre-validation, validation
        sequence = sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName = sequence.split(',')[0].split('+')
            valSeqName = sequence.split(',')[1].split('+')
            # ...
        elif '@' in sequence:
            prevalSeqName = sequence.split('+')
            valSeqName = sequence.split('+')
            # ...
        else:
            postfix = '_'+sequence
            prevalSeqName = ['prevalidation'+postfix]
            valSeqName = ['validation'+postfix]
            if not hasattr(self.process,valSeqName[0]):
                # ...
                valSeqName = [sequence]
        # ...
        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                self.renameHLTprocessInSequence(s)

        for (i,s) in enumerate(prevalSeqName):
            setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s) ) )
            self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        # ...
        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
            pass  # ...

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                # the option was left at its default: switch it on for validation
                self._options.restoreRNDSeeds=True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            # ...
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # put the filtering path in the schedule
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
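# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# NFI(i) is used above to build the 'prevalidation_step%s' / 'validation_step%s'
# path names, but its definition falls in an elided stretch of this listing.
# A minimal sketch, assuming it simply maps index 0 to an empty suffix (so the
# first path keeps its historical name) and any other index to its number:
def name_from_index(index):
    return '' if index == 0 else '%s' % index

print(['validation_step%s' % name_from_index(i) for i in range(3)])
# ['validation_step', 'validation_step1', 'validation_step2']
# -----------------------------------------------------------------------------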
1858 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1859 It will climb down within PSets, VPSets and VInputTags to find its target""" 1860 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1867 if isinstance(pset, cms._Parameterizable):
1868 for name
in pset.parameters_().
keys():
1874 value = getattr(pset,name)
1875 type = value.pythonTypeName()
1876 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1877 self.
doIt(value,base+
"."+name)
1878 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1879 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1880 elif type
in (
'cms.string',
'cms.untracked.string'):
1882 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1884 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1885 for (i,n)
in enumerate(value):
1886 if not isinstance(n, cms.InputTag):
1890 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1893 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1894 for (i,n)
in enumerate(value):
1897 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1899 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1900 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1905 label = visitee.label()
1906 except AttributeError:
1907 label =
'<Module not in a Process>' 1909 label =
'other execption' 1910 self.
doIt(visitee, label)
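# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# Stripped-down analogue of the recursive doIt() traversal above, using plain
# Python containers instead of cms.PSet / cms.InputTag: a "tag" is modelled as
# a dict carrying a 'processName' key, nested dicts play the role of PSets and
# lists the role of VPSets / VInputTags.  All names are invented.
def replace_process_name(node, search, replace, base='process'):
    if isinstance(node, dict):
        if 'processName' in node:                      # InputTag-like leaf
            if node['processName'] == search:
                print('%s: %s ==> %s' % (base, search, replace))
                node['processName'] = replace
        else:                                          # PSet-like branch
            for key in node:
                replace_process_name(node[key], search, replace, base + '.' + key)
    elif isinstance(node, list):                       # VPSet/VInputTag-like branch
        for i, item in enumerate(node):
            replace_process_name(item, search, replace, '%s[%d]' % (base, i))

config = {'patTrigger': {'src': {'processName': 'HLT'}},
          'tags': [{'processName': 'HLT'}, {'processName': 'RECO'}]}
replace_process_name(config, 'HLT', 'reHLT')
print(config['patTrigger']['src']['processName'])      # 'reHLT'
# -----------------------------------------------------------------------------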
1917 print "Replacing all InputTag %s => %s"%(oldT,newT)
1920 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1921 if not loadMe
in self.additionalCommands:
1922 self.additionalCommands.append(loadMe)
1923 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1927 if self._options.hltProcess:
1928 proc=self._options.hltProcess
1930 proc=self.process.name_()
1931 if proc==HLTprocess:
return 1933 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1935 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1936 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1937 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
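# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# Both helpers above follow the same convention: schedule an edit on the live
# process and append the equivalent python statement to self.additionalCommands
# so that the dumped configuration reproduces it.  For --hltProcess reHLT and a
# sequence named 'DQMOffline' (values chosen only for the example), the
# recorded statements would read:
example_commands = [
    'from Configuration.Applications.ConfigBuilder import ConfigBuilder',
    'process.DQMOffline.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT", whitelist = ("subSystemFolder",)))',
]
print('\n'.join(example_commands))
# -----------------------------------------------------------------------------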
    def expandMapping(self, seqList, mapping, index=None):
        maxLevel = 20
        level = 0
        while '@' in repr(seqList) and level < maxLevel:
            level += 1
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location = specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo = mapping[location]
                    if index is not None:
                        mappedTo = mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break
        if level == maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
    def prepare_DQM(self, sequence='DQMOffline'):
        # ...
        sequenceList = sequence.split('.')[-1].split('+')
        postSequenceList = sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList = list(set(sequenceList))
            print "Duplicate entries for DQM:, using",sequenceList

        pathName = 'dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            if i!=0:
                pathName = 'dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                # will get into the schedule smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        pathName = 'dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            if i!=0:
                pathName = 'dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
1997 """ Enrich the process with harvesting step """ 1998 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 2002 sequence = sequence.split(
'.')[-1]
2005 harvestingList = sequence.split(
"+")
2006 from DQMOffline.Configuration.autoDQM
import autoDQM
2007 from Validation.Configuration.autoValidation
import autoValidation
2009 combined_mapping = copy.deepcopy( autoDQM )
2010 combined_mapping.update( autoValidation )
2011 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2013 if len(set(harvestingList))!=len(harvestingList):
2014 harvestingList=
list(set(harvestingList))
2015 print "Duplicate entries for HARVESTING, using",harvestingList
2017 for name
in harvestingList:
2018 if not name
in harvestingConfig.__dict__:
2019 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
2021 harvestingstream = getattr(harvestingConfig,name)
2022 if isinstance(harvestingstream,cms.Path):
2023 self.schedule.append(harvestingstream)
2024 self.blacklist_paths.append(harvestingstream)
2025 if isinstance(harvestingstream,cms.Sequence):
2026 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
2027 self.schedule.append(getattr(self.process,name+
"_step"))
2033 """ Enrich the process with AlCaHarvesting step """ 2035 sequence=sequence.split(
".")[-1]
2038 harvestingList = sequence.split(
"+")
2042 from Configuration.AlCa.autoPCL
import autoPCL
2045 for name
in harvestingConfig.__dict__:
2046 harvestingstream = getattr(harvestingConfig,name)
2047 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2048 self.schedule.append(harvestingstream)
2049 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2050 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2051 harvestingList.remove(name)
2053 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2054 self.schedule.append(lastStep)
2056 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2057 print "The following harvesting could not be found : ", harvestingList
2058 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
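# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# executeAndRemember() (defined elsewhere in this file) embodies the builder's
# "apply now, replay in the dump" idea: the literal python statement is kept
# for the generated configuration and also acted on immediately.  A minimal
# standalone sketch of that pattern with invented names -- not the actual
# method, which works on self.process / self.additionalCommands:
class MiniBuilder(object):
    def __init__(self, process):
        self.process = process
        self.additionalCommands = []

    def execute_and_remember(self, command):
        # remember the statement for the dumped configuration ...
        if command not in self.additionalCommands:
            self.additionalCommands.append(command)
        # ... and apply it to the in-memory process right away
        # (naive textual rewrite; good enough for this illustration)
        exec(command.replace('process.', 'self.process.'))

class FakeProcess(object):
    pass

builder = MiniBuilder(FakeProcess())
builder.execute_and_remember("process.maxLuminosityBlocks = 5")
print(builder.process.maxLuminosityBlocks)   # 5
print(builder.additionalCommands)            # statement replayed in the dump
# -----------------------------------------------------------------------------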
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)

    def build_production_info(self, evt_type, evtnumber):
        """ Add useful info for the production. """
        self.process.configurationMetadata = cms.untracked.PSet\
                            (version=cms.untracked.string("$Revision: 1.19 $"),
                             name=cms.untracked.string("Applications"),
                             annotation=cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
                             )

        self.addedObjects.append(("Production Info","configurationMetadata"))
2084 """ Prepare the configuration string and add missing pieces.""" 2096 outputModuleCfgCode=
"" 2097 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2102 self.pythonCfgCode =
"# Auto generated configuration file\n" 2103 self.pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2104 self.pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2105 self.pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2106 if hasattr(self.
_options,
"era")
and self._options.era :
2107 self.pythonCfgCode +=
"from Configuration.StandardSequences.Eras import eras\n\n" 2108 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"'" 2110 for requestedEra
in self._options.era.split(
",") :
2111 self.pythonCfgCode +=
",eras."+requestedEra
2112 self.pythonCfgCode +=
")\n\n" 2114 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"')\n\n" 2116 self.pythonCfgCode +=
"# import of standard configurations\n" 2117 for module
in self.imports:
2118 self.pythonCfgCode += (
"process.load('"+module+
"')\n")
2121 if not hasattr(self.process,
"configurationMetadata"):
2125 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2127 self.pythonCfgCode +=
"\n" 2128 for comment,object
in self.addedObjects:
2130 self.pythonCfgCode +=
"\n# "+comment+
"\n" 2131 self.pythonCfgCode +=
dumpPython(self.process,object)
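# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# What the era handling above produces: one eras.* argument per requested era
# is appended to the generated cms.Process(...) line.  The era name below is
# only an example value for the --era option.
def process_creation_line(process_name, era_option):
    code = "process = cms.Process('" + process_name + "'"
    if era_option:
        for requestedEra in era_option.split(","):
            code += ",eras." + requestedEra
    return code + ")\n"

print(process_creation_line('RECO', 'Run2_2017'))
# process = cms.Process('RECO',eras.Run2_2017)
# -----------------------------------------------------------------------------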
        # dump the output definition
        self.pythonCfgCode += "\n# Output definition\n"
        self.pythonCfgCode += outputModuleCfgCode

        # dump all additional outputs (e.g. alca or skim streams)
        self.pythonCfgCode += "\n# Additional output definition\n"
        nl = self.additionalOutputs.keys()
        nl.sort()
        for name in nl:
            output = self.additionalOutputs[name]
            self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
            tmpOut = cms.EndPath(output)
            setattr(self.process,name+'OutPath',tmpOut)
            self.schedule.append(tmpOut)

        # dump all additional commands
        self.pythonCfgCode += "\n# Other statements\n"
        for command in self.additionalCommands:
            self.pythonCfgCode += command + "\n"

        # dump the objects the user asked to inline
        for object in self._options.inlineObjets.split(','):
            if not object:
                continue
            if not hasattr(self.process,object):
                print 'cannot inline -'+object+'- : not known'
            else:
                self.pythonCfgCode += '\n'
                self.pythonCfgCode += dumpPython(self.process,object)

        # dump all paths and endpaths (except the blacklisted ones)
        self.pythonCfgCode += "\n# Path and EndPath definitions\n"
        for path in self.process.paths:
            if getattr(self.process,path) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process,path)

        for endpath in self.process.endpaths:
            if getattr(self.process,endpath) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process,endpath)
        # dump the schedule definition
        self.pythonCfgCode += "\n# Schedule definition\n"
        result = "process.schedule = cms.Schedule("

        # build the in-memory schedule from the list collected while preparing the steps
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process,"HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
            pathNames = ['process.'+p.label_() for p in beforeHLT]
            result += ','.join(pathNames)+')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.'+p.label_() for p in afterHLT]
            result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
        else:
            pathNames = ['process.'+p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'

        self.pythonCfgCode += result

        for labelToAssociate in self.labelsToAssociate:
            self.process.schedule.associate(getattr(self.process, labelToAssociate))
            self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

        # ...
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
        self.pythonCfgCode += "associatePatAlgosToolsTask(process)\n"

        # multi-threading setup (nThreads is kept as a string on the options object)
        if self._options.nThreads != "1":
            self.pythonCfgCode += "\n"
            self.pythonCfgCode += "#Setup FWK for multithreaded\n"
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(0)\n"

        # repacked version
        if self._options.isRepacked:
            self.pythonCfgCode += "\n"
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            self.pythonCfgCode += "MassReplaceInputTag(process)\n"
            MassReplaceInputTag(self.process)
        if self.productionFilterSequence:
            self.pythonCfgCode += '# filter all path with the production filter sequence\n'
            self.pythonCfgCode += 'for path in process.paths:\n'
            if len(self.conditionalPaths):
                self.pythonCfgCode += '\tif not path in %s: continue\n'%str(self.conditionalPaths)
            if len(self.excludedPaths):
                self.pythonCfgCode += '\tif path in %s: continue\n'%str(self.excludedPaths)
            self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
            pfs = getattr(self.process,self.productionFilterSequence)
            for path in self.process.paths:
                if not path in self.conditionalPaths: continue
                if path in self.excludedPaths: continue
                getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
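# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# For a production filter sequence called 'ProductionFilterSequence' and
# conditional paths ['generation_step', 'simulation_step'] (values invented),
# the text emitted above lands in the dumped configuration as:
snippet = (
    '# filter all path with the production filter sequence\n'
    'for path in process.paths:\n'
    "\tif not path in ['generation_step', 'simulation_step']: continue\n"
    '\tgetattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq \n'
)
print(snippet)
# -----------------------------------------------------------------------------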
        if self._options.runUnscheduled:
            # convert the configuration to unscheduled mode, both in the dumped
            # text and on the in-memory process
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"

            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            self.process = convertToUnscheduled(self.process)
            # ...

        if hasattr(self.process,"logErrorHarvester"):
            # configure logErrorHarvester to wait for the same EDProducers as the OutputModules
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)

        # add early deletion of temporary data products to reduce peak memory need
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        self.pythonCfgCode += "# End adding early deletion\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        self.process = customiseEarlyDelete(self.process)
        # write the expected input/output summary to a .io file if requested
        if self._options.io:
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io = open(self._options.io,'w')
            ioJson = {}
            if hasattr(self.process.source,"fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source,"secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o,om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter'] = self.productionFilterSequence
            # ...
            io.write(json.dumps(ioJson))
        # ...
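# --- Illustration (not part of ConfigBuilder.py) ----------------------------
# Example of the kind of JSON the block above writes to the .io file.  File
# name, output module label and global tag are invented; a key only appears
# when the corresponding piece of the configuration exists.
import json

example_ioJson = {
    'primary': ['/store/data/Run2017B/ZeroBias/RAW/v1/000/000/001/00000/someraw.root'],
    'RECOoutput': 'output.root',
    'GT': '92X_dataRun2_Prompt_v4',
}
print(json.dumps(example_ioJson))
# -----------------------------------------------------------------------------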