__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'

def dumpPython(process,name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    return "process."+name+" = " + theObject.dumpPython()+"\n"

def filesFromList(fileName,s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print "found parent files:",sec

def filesFromDASQuery(query,option="",s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print "the query is",query

    while eC!=0 and count<3:

        print 'Sleeping, then retrying DAS'
        p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)

        tupleP = os.waitpid(p.pid, 0)

    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(sec)!=0:
        print "found parent files:",sec

def anyOf(listOfKeys,dict,opt=None):

    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
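
        # Illustrative sketch (assumption, not part of the original source):
        # a --step string such as "RAW2DIGI,RECO:localreco+globalreco,DQM" is
        # parsed into
        #   self.stepMap == {'RAW2DIGI': '', 'RECO': ['localreco','globalreco'], 'DQM': ''}
        #   self.stepKeys == ['RAW2DIGI', 'RECO', 'DQM']
        # and later drives the prepare_<STEP>() dispatch.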

        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.schedule = list()

        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.labelsToAssociate=[]
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
270 Function to add the igprof profile service so that you can dump in the middle 273 profileOpts = self._options.profile.split(
':')
275 profilerInterval = 100
276 profilerFormat =
None 277 profilerJobFormat =
None 283 startEvent = profileOpts.pop(0)
284 if not startEvent.isdigit():
285 raise Exception(
"%s is not a number" % startEvent)
286 profilerStart =
int(startEvent)
288 eventInterval = profileOpts.pop(0)
289 if not eventInterval.isdigit():
290 raise Exception(
"%s is not a number" % eventInterval)
291 profilerInterval =
int(eventInterval)
293 profilerFormat = profileOpts.pop(0)
296 if not profilerFormat:
297 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
299 self._options.pileup,
300 self._options.conditions,
301 self._options.datatier,
302 self._options.profileTypeLabel)
303 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
304 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
305 elif not profilerJobFormat:
306 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 308 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)

    def load(self,includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def executeAndRemember(self, command):
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))

        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            (start, interval, eventFormat, jobFormat) = self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
363 """Here we decide how many evts will be processed""" 364 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
365 if self._options.number_out:
366 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
367 self.addedObjects.append((
"",
"maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())

            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())

            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):

                    args=self._options.filein.split(':')

                    print 'LHE input from article ',article
                    location='/store/lhe/'

                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)

                    print 'Issue to load LHE files, please check and try again.'

                    if len(self.process.source.fileNames)==0:
                        print 'Issue with empty filename, but can pass line check'

                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))

            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())

            elif self._options.filetype == "DQMDAQ":
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options,"evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
498 """ Add output module to the process """ 500 if self._options.outputDefinition:
501 if self._options.datatier:
502 print "--datatier & --eventcontent options ignored" 505 outList = eval(self._options.outputDefinition)
506 for (id,outDefDict)
in enumerate(outList):
507 outDefDictStr=outDefDict.__str__()
508 if not isinstance(outDefDict,dict):
509 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
511 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
514 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
515 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
516 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
517 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
518 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
520 if not theModuleLabel:
521 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
522 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 525 for name
in tryNames:
526 if not hasattr(self.process,name):
529 if not theModuleLabel:
530 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
532 defaultFileName=self._options.outfile_name
534 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
536 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
537 if not theFileName.endswith(
'.root'):
540 if len(outDefDict.keys()):
541 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
542 if theStreamType==
'DQMIO': theStreamType=
'DQM' 543 if theStreamType==
'ALL':
544 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
546 theEventContent = getattr(self.process, theStreamType+
"EventContent")
550 if theStreamType==
'ALCARECO' and not theFilterName:
551 theFilterName=
'StreamALCACombined' 554 CppType=
'PoolOutputModule' 555 if self._options.timeoutOutput:
556 CppType=
'TimeoutPoolOutputModule' 557 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 558 output = cms.OutputModule(CppType,
559 theEventContent.clone(),
560 fileName = cms.untracked.string(theFileName),
561 dataset = cms.untracked.PSet(
562 dataTier = cms.untracked.string(theTier),
563 filterName = cms.untracked.string(theFilterName))
565 if not theSelectEvent
and hasattr(self.process,
'generation_step')
and theStreamType!=
'LHE':
566 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
567 if not theSelectEvent
and hasattr(self.process,
'filtering_step'):
568 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
570 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
573 if not hasattr(output,
'SelectEvents'):
574 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
575 for alca
in self.AlCaPaths:
576 output.SelectEvents.SelectEvents.extend(getattr(self.process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
579 if hasattr(self.process,theModuleLabel):
580 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
582 setattr(self.process,theModuleLabel,output)
583 outputModule=getattr(self.process,theModuleLabel)
584 setattr(self.process,theModuleLabel+
'_step',cms.EndPath(outputModule))
585 path=getattr(self.process,theModuleLabel+
'_step')
586 self.schedule.append(path)
588 if not self._options.inlineEventContent
and hasattr(self.process,theStreamType+
"EventContent"):
591 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
592 if theExtraOutputCommands:
593 if not isinstance(theExtraOutputCommands,list):
594 raise Exception(
"extra ouput command in --option must be a list of strings")
595 if hasattr(self.process,theStreamType+
"EventContent"):
596 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
598 outputModule.outputCommands.extend(theExtraOutputCommands)
600 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()

        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                output.dropMetaData = cms.untracked.string('ALL')
                output.fastCloning= cms.untracked.bool(False)
                output.overrideInputFileSplitLevels = cms.untracked.bool(True)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:

                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

    def addStandardSequences(self):
        """
        Add selected standard sequences to the process
        """
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')

                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
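
            # Illustrative sketch (assumption): --pileup may carry extra settings after
            # the scenario name, e.g. "MyMixingScenario,{'B': (-3,3)}"; everything after
            # the first comma is eval'ed and merged into mixingDict before the
            # defineMixing() specialization commands are applied above.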

        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                self.loadAndRemember(self.SimGeometryCFF)
                if self.geometryDBLabel:
                    self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))

        print "Geometry option",self._options.geometry,"unknown."

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):

                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:

            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:

            def dropSecondDropStar(iec):

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
797 """Add conditions to the process""" 798 if not self._options.conditions:
return 800 if 'FrontierConditions_GlobalTag' in self._options.conditions:
801 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 802 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
804 self.loadAndRemember(self.ConditionsDefaultCFF)
806 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
807 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
808 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
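
        # Illustrative sketch (assumption): with e.g. --conditions auto:run2_mc the two
        # commands recorded above appear in the dumped configuration as
        #   from Configuration.AlCa.GlobalTag import GlobalTag
        #   process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_mc', '')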
812 """Include the customise code """ 816 for c
in self._options.customisation_file:
817 custOpt.extend(c.split(
","))
819 for c
in self._options.customisation_file_unsch:
820 custOpt.extend(c.split(
","))
826 raise Exception(
"more than . in the specification:"+opt)
827 fileName=opt.split(
'.')[0]
828 if opt.count(
'.')==0: rest=
'customise' 830 rest=opt.split(
'.')[1]
831 if rest==
'py': rest=
'customise' 833 if fileName
in custMap:
834 custMap[fileName].extend(rest.split(
'+'))
836 custMap[fileName]=rest.split(
'+')
841 final_snippet=
'\n# customisation of the process.\n' 845 allFcn.extend(custMap[opt])
847 if allFcn.count(fcn)!=1:
848 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
852 packageName = f.replace(
".py",
"").
replace(
"/",
".")
853 __import__(packageName)
854 package = sys.modules[packageName]
857 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
859 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 860 if self._options.inline_custom:
861 for line
in file(customiseFile,
'r'): 862 if "import FWCore.ParameterSet.Config" in line:
864 final_snippet += line
866 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
867 for fcn
in custMap[f]:
868 print "customising the process with",fcn,
"from",f
869 if not hasattr(package,fcn):
871 raise Exception(
"config "+f+
" has no function "+fcn)
873 self.process=getattr(package,fcn)(self.process)
875 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
876 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
879 final_snippet +=
'\n# End of customisation functions\n' 885 final_snippet=
'\n# Customisation from command line\n' 886 if self._options.customise_commands:
888 for com
in self._options.customise_commands.split(
'\\n'):
889 com=string.lstrip(com)
891 final_snippet +=
'\n'+com

    def define_Configs(self):
        if len(self.stepMap):

        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"

                self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"

            self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"

        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        if self._options.isMC==True:

            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None

        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'

        def inGeometryKeys(opt):
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]

        geoms=self._options.geometry.split(',')

        if '/' in geoms[1] or '_cff' in geoms[1]:
            self.GeometryCFF=geoms[1]
        else:
            self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel=geoms[0][3:]
        else:
            if '/' in geoms[0] or '_cff' in geoms[0]:
                self.SimGeometryCFF=geoms[0]
            else:
                simGeometry=geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                else:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        if self._options.isData:
            self._options.pileup=None

        self.REDIGIDefaultSeq=self.DIGIDefaultSeq

    def addExtraStream(self,name,stream,workflow='full'):
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:

                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':

            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

    def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF):
        if ( len(sequence.split('.'))==1 ):
            l=self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            l=self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"
"not recognized" 1198 for i,s
in enumerate(seq.split(
'*')):
1200 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
1202 p=getattr(self.process,prefix)
1203 p+=getattr(self.process, s)
1204 self.schedule.append(getattr(self.process,prefix))
1209 if self.nextScheduleIsConditional:
1210 self.conditionalPaths.append(prefix)
1211 setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
1212 self.schedule.append(getattr(self.process,prefix))
1214 for i,s
in enumerate(seq.split(
'+')):
1216 setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
1217 self.schedule.append(getattr(self.process,sn))
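
    # Behaviour sketch (assumption, not part of the original source): a '*'-joined
    # sequence spec is scheduled as a single Path containing all parts, while a
    # '+'-joined spec creates one numbered Path per part, e.g.
    #   scheduleSequence('a*b','foo_step')  ->  process.foo_step = cms.Path(a+b)
    #   scheduleSequence('a+b','foo_step')  ->  process.foo_step0, process.foo_step1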
1231 """ Enrich the process with alca streams """ 1233 sequence = sequence.split(
'.')[-1]
1236 alcaList = sequence.split(
"+")
1238 from Configuration.AlCa.autoAlca
import autoAlca
1242 for name
in alcaConfig.__dict__:
1243 alcastream = getattr(alcaConfig,name)
1244 shortName = name.replace(
'ALCARECOStream',
'')
1245 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1246 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1247 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1248 self.AlCaPaths.append(shortName)
1249 if 'DQM' in alcaList:
1250 if not self._options.inlineEventContent
and hasattr(self.process,name):
1251 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1253 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1256 if self._options.hltProcess
or 'HLT' in self.stepMap:
1257 if isinstance(alcastream.paths,tuple):
1258 for path
in alcastream.paths:
1263 for i
in range(alcaList.count(shortName)):
1264 alcaList.remove(shortName)
1267 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1268 path = getattr(alcaConfig,name)
1269 self.schedule.append(path)
1270 alcaList.remove(
'DQM')
1272 if isinstance(alcastream,cms.Path):
1274 self.blacklist_paths.append(alcastream)
1277 if len(alcaList) != 0:
1279 for name
in alcaConfig.__dict__:
1280 alcastream = getattr(alcaConfig,name)
1281 if isinstance(alcastream,cms.FilteredStream):
1282 available.append(name.replace(
'ALCARECOStream',
''))
1283 print "The following alcas could not be found "+
str(alcaList)
1284 print "available ",available
1286 raise Exception(
"The following alcas could not be found "+
str(alcaList))

    def prepare_LHE(self, sequence = None):
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1306 """ load the fragment of generator configuration """ 1311 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1313 if not '/' in loadFragment:
1314 loadFragment=
'Configuration.Generator.'+loadFragment
1316 loadFragment=loadFragment.replace(
'/',
'.')
1318 print "Loading generator fragment from",loadFragment
1319 __import__(loadFragment)
1323 if not (self._options.filein
or self._options.dasquery):
1324 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1327 generatorModule=sys.modules[loadFragment]
1328 genModules=generatorModule.__dict__
1331 if self.LHEDefaultSeq
in genModules:
1332 del genModules[self.LHEDefaultSeq]
1334 if self._options.hideGen:
1335 self.loadAndRemember(loadFragment)
1337 self.process.load(loadFragment)
1339 import FWCore.ParameterSet.Modules
as cmstypes
1340 for name
in genModules:
1341 theObject = getattr(generatorModule,name)
1342 if isinstance(theObject, cmstypes._Module):
1343 self._options.inlineObjets=name+
','+self._options.inlineObjets
1344 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1345 self._options.inlineObjets+=
','+name
1347 if sequence == self.GENDefaultSeq
or sequence ==
'pgen_genonly':
1348 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1349 self.productionFilterSequence =
'ProductionFilterSequence' 1350 elif 'generator' in genModules:
1351 self.productionFilterSequence =
'generator' 1353 """ Enrich the schedule with the rest of the generation step """ 1354 self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
1355 genSeqName=sequence.split(
'.')[-1]
1359 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1360 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1361 self.loadAndRemember(cffToBeLoaded)
1363 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1365 if self._options.scenario ==
'HeavyIons':
1366 if self._options.pileup==
'HiMixGEN':
1367 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1369 self.loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1371 self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
1372 self.schedule.append(self.process.generation_step)
1375 self.executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1377 if 'reGEN' in self.stepMap:
1381 """ Enrich the schedule with the summary of the filter step """ 1383 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1384 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1388 """ Enrich the schedule with the simulation step""" 1389 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1390 if not self._options.fast:
1391 if self._options.gflash==
True:
1392 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1394 if self._options.magField==
'0T':
1395 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1397 if self._options.magField==
'0T':
1398 self.executeAndRemember(
"process.famosSimHits.UseMagneticField = cms.bool(False)")
1400 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1404 """ Enrich the schedule with the digitisation step""" 1407 if self._options.gflash==
True:
1408 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1410 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1411 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1413 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1414 if self._options.inputEventContent==
'':
1415 self._options.inputEventContent=
'REGEN' 1417 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1424 """ Enrich the schedule with the digitisation step""" 1429 if sequence ==
'pdigi_valid':
1430 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1432 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1438 """ Enrich the schedule with the digitisation step""" 1439 self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1441 self.loadAndRemember(
"SimGeneral/MixingModule/digi_MixPreMix_cfi")
1444 if sequence ==
'pdigi_valid':
1445 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1447 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1449 self.scheduleSequence(sequence.split(
'.')[-1],
'digitisation_step')
1453 """ Enrich the schedule with the crossing frame writer step""" 1459 """ Enrich the schedule with the digitisation step""" 1463 if self._options.pileup_input:
1465 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1466 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1467 elif self._options.pileup_input.startswith(
"filelist:"):
1468 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1470 theFiles=self._options.pileup_input.split(
',')
1472 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1479 if "DIGIPREMIX" in self.stepMap.keys():
1481 self.
executeAndRemember(
"process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
1491 """ Enrich the schedule with the L1 simulation step""" 1492 assert(sequence ==
None)
1498 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1499 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT']
1500 if sequence
in supported:
1501 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1502 if self._options.scenario ==
'HeavyIons':
1503 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1504 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1506 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1511 """ Enrich the schedule with the HLT simulation step""" 1513 print "no specification of the hlt menu has been given, should never happen" 1514 raise Exception(
'no HLT sequence provided')
1518 from Configuration.HLT.autoHLT
import autoHLT
1521 sequence = autoHLT[key]
1523 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1529 if self._options.scenario ==
'HeavyIons':
1530 optionsForHLT[
'type'] =
'HIon' 1532 optionsForHLT[
'type'] =
'GRun' 1533 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1534 if sequence ==
'run,fromSource':
1535 if hasattr(self.process.source,
'firstRun'):
1536 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1537 elif hasattr(self.process.source,
'setRunNumber'):
1538 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1540 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1542 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1546 if self._options.isMC:
1547 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1549 if self._options.name !=
'HLT':
1550 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1551 self.additionalCommands.append(
'process = ProcessName(process)')
1552 self.additionalCommands.append(
'')
1553 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1556 self.schedule.append(self.process.HLTSchedule)
1557 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1560 if self._options.fast:
1561 if not hasattr(self.process,
'HLTEndSequence'):
1562 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")

    def prepare_RAW2RECO(self, sequence = None):
        if ',' in sequence:
            seqReco=sequence.split(',')[1]
            seqDigi=sequence.split(',')[0]
        else:
            print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")

        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))

    def prepare_L1HwVal(self, sequence = None):
        ''' Enrich the schedule with L1 HW validation '''
        print '\n\n\n DEPRECATED this has no action \n\n\n'

    def prepare_L1Reco(self, sequence = None):
        ''' Enrich the schedule with L1 reconstruction '''

    def prepare_L1TrackTrigger(self, sequence = None):
        ''' Enrich the schedule with L1 reconstruction '''

    def prepare_FILTER(self, sequence = None):
        ''' Enrich the schedule with a user defined filter sequence '''
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]

        label=visitee.label()

        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True

        self.productionFilterSequence = filterSeq

    def prepare_RECO(self, sequence = None):
        ''' Enrich the schedule with reconstruction '''

    def prepare_RECOBEFMIX(self, sequence = None):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"

        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

    def prepare_PAT(self, sequence = None):
        ''' Enrich the schedule with PAT '''

        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
        if self._options.fast:
            self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1680 ''' Enrich the schedule with event interpretation ''' 1681 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1682 if sequence
in EventInterpretation:
1683 self.EIDefaultCFF = EventInterpretation[sequence]
1684 sequence =
'EIsequence' 1686 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1687 self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1688 self.scheduleSequence(sequence.split(
'.')[-1],
'eventinterpretaion_step')
1692 ''' Enrich the schedule with skimming fragments''' 1694 sequence = sequence.split(
'.')[-1]
1696 skimlist=sequence.split(
'+')
1698 from Configuration.Skimming.autoSkim
import autoSkim
1702 for skim
in skimConfig.__dict__:
1703 skimstream = getattr(skimConfig,skim)
1704 if isinstance(skimstream,cms.Path):
1706 self.blacklist_paths.append(skimstream)
1707 if (
not isinstance(skimstream,cms.FilteredStream)):
1709 shortname = skim.replace(
'SKIMStream',
'')
1710 if (sequence==
"all"):
1712 elif (shortname
in skimlist):
1715 if self._options.datatier==
'DQM':
1716 self.process.load(self.EVTCONTDefaultCFF)
1717 skimstreamDQM = cms.FilteredStream(
1718 responsible = skimstream.responsible,
1719 name = skimstream.name+
'DQM',
1720 paths = skimstream.paths,
1721 selectEvents = skimstream.selectEvents,
1722 content = self._options.datatier+
'EventContent',
1723 dataTier = cms.untracked.string(self._options.datatier)
1726 for i
in range(skimlist.count(shortname)):
1727 skimlist.remove(shortname)
1731 if (skimlist.__len__()!=0
and sequence!=
"all"):
1732 print 'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist)
1733 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
    def prepare_USER(self, sequence=None):
        ''' Enrich the schedule with a user defined sequence '''
        # (body not captured in this listing)

    def prepare_POSTRECO(self, sequence=None):
        """ Enrich the schedule with the postreco step """
        # (body not captured in this listing)

    def prepare_VALIDATION(self, sequence='validation'):
        print sequence, "in preparing validation"
        # (the validation cff is loaded here; line not captured)
        from Validation.Configuration.autoValidation import autoValidation
        # in case of VALIDATION:something:somethingelse -> something,somethingelse
        sequence = sequence.split('.')[-1]
        if sequence.find(',') != -1:
            # prevalidation and validation sequences are given explicitly
            prevalSeqName = sequence.split(',')[0].split('+')
            valSeqName = sequence.split(',')[1].split('+')
            # ('@' aliases are expanded with autoValidation; lines not captured)
        elif '@' in sequence:   # (branch condition reconstructed; the listing drops a few lines here)
            prevalSeqName = sequence.split('+')
            valSeqName = sequence.split('+')
            # ('@' aliases are expanded with autoValidation; lines not captured)
        else:
            postfix = '_'+sequence
            prevalSeqName = ['prevalidation'+postfix]
            valSeqName = ['validation'+postfix]
            if not hasattr(self.process, valSeqName[0]):
                # fall back to the plain sequence name
                valSeqName = [sequence]

        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                # the HLT process name is fixed up in each sequence (loop body reconstructed)
                self.renameHLTprocessInSequence(s)

        for (i, s) in enumerate(prevalSeqName):
            setattr(self.process, 'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s) ) )
            self.schedule.append(getattr(self.process, 'prevalidation_step%s'%NFI(i)))

        for (i, s) in enumerate(valSeqName):
            setattr(self.process, 'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s) ) )
            self.schedule.append(getattr(self.process, 'validation_step%s'%NFI(i)))

        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
            # (special handling when PAT runs without RECO; body not captured)
            pass

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds == False and not self._options.restoreRNDSeeds == True:
                self._options.restoreRNDSeeds = True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # put the gen-step filter in front of every validation step
            for (i, s) in enumerate(valSeqName):
                getattr(self.process, 'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process, 'validation_step%s'%NFI(i))._seq
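
# Hedged sketch: NFI is defined elsewhere in ConfigBuilder.py and is not shown in this
# listing; judging from its use above it only turns an index into a path-name suffix.
# A minimal stand-in, assuming the first step keeps the bare name and later ones get
# numbered:
def NFI(index):
    return '' if index == 0 else '%s' % index

print ['validation_step%s' % NFI(i) for i in range(3)]
# ['validation_step', 'validation_step1', 'validation_step2']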
1820 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1821 It will climb down within PSets, VPSets and VInputTags to find its target""" 1822 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1829 if isinstance(pset, cms._Parameterizable):
1830 for name
in pset.parameters_().
keys():
1836 value = getattr(pset,name)
1837 type = value.pythonTypeName()
1838 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1839 self.
doIt(value,base+
"."+name)
1840 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1841 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1842 elif type
in (
'cms.string',
'cms.untracked.string'):
1844 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1846 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1847 for (i,n)
in enumerate(value):
1848 if not isinstance(n, cms.InputTag):
1852 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1855 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1856 for (i,n)
in enumerate(value):
1859 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1861 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1862 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1867 label = visitee.label()
1868 except AttributeError:
1869 label =
'<Module not in a Process>' 1871 label =
'other execption' 1872 self.
doIt(visitee, label)
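
# Illustrative usage sketch (not from the original file): a visitor of this kind is
# handed to cms.Sequence.visit(), which calls enter()/leave() for every module in the
# sequence.  The module and sequence names below are invented for the example.
import FWCore.ParameterSet.Config as cms
from Configuration.Applications.ConfigBuilder import ConfigBuilder

process = cms.Process("TEST")
process.demoProducer = cms.EDProducer("SomeProducer",                    # hypothetical module
    src = cms.InputTag("rawDataCollector", "", "HLT"))
process.demoSequence = cms.Sequence(process.demoProducer)

# replace the 'HLT' process name by 'reHLT' everywhere inside the sequence
process.demoSequence.visit(
    ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT", verbose=True))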
1879 print "Replacing all InputTag %s => %s"%(oldT,newT)
1882 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1883 if not loadMe
in self.additionalCommands:
1884 self.additionalCommands.append(loadMe)
1885 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
    def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT'):
        if self._options.hltProcess:
            proc = self._options.hltProcess
        else:
            proc = self.process.name_()
        if proc == HLTprocess:
            return
        # look up all module labels in the sequence and replace the HLT process name
        print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc)
        # (the in-memory sequence is visited with the MassSearchReplaceProcessNameVisitor here;
        #  that line is not captured in this listing)
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self, seqList, mapping, index=None):
        # '@alias' entries of seqList are replaced in place, iterating until none is left
        maxLevel = 20    # (depth limit and counter reconstructed; required by the loop condition below)
        level = 0
        while '@' in repr(seqList) and level < maxLevel:
            level += 1
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location = specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo = mapping[location]
                    if index is not None:
                        mappedTo = mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break   # restart the scan: seqList was modified while iterating
        if level == maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
    def prepare_DQM(self, sequence='DQMOffline'):
        # the sequence can carry several '+'-separated DQM sequences, each ending up in its own EndPath
        sequenceList = sequence.split('.')[-1].split('+')
        postSequenceList = sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # ('@' aliases are expanded here via expandMapping; lines not captured)

        if len(set(sequenceList)) != len(sequenceList):
            sequenceList = list(set(sequenceList))
            print "Duplicate entries for DQM, using", sequenceList

        pathName = 'dqmoffline_step'
        for (i, sequence) in enumerate(sequenceList):
            if (i != 0):   # (guard reconstructed: only the later steps get a numbered name)
                pathName = 'dqmoffline_%d_step'%(i)
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # the HLT process name is fixed up in the DQM sequence (body reconstructed)
                self.renameHLTprocessInSequence(sequence)
            setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence) ) )
            self.schedule.append(getattr(self.process, pathName))

            if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                # put the filtering path in the schedule
                getattr(self.process, pathName).insert(0, self.process.genstepfilter)

        pathName = 'dqmofflineOnPAT_step'
        for (i, sequence) in enumerate(postSequenceList):
            if (i != 0):
                pathName = 'dqmofflineOnPAT_%d_step'%(i)
            setattr(self.process, pathName, cms.EndPath( getattr(self.process, sequence) ) )
            self.schedule.append(getattr(self.process, pathName))
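
# Aside (not from the original file): list(set(...)) above removes duplicates but does
# not preserve the order in which the DQM sequences were requested.  If order matters,
# an order-preserving de-duplication could look like this (sequence names are examples):
def dedup_keep_order(items):
    seen = set()
    out = []
    for item in items:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out

print dedup_keep_order(['DQMOfflineCommon', 'Muon', 'DQMOfflineCommon'])
# ['DQMOfflineCommon', 'Muon']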
1959 """ Enrich the process with harvesting step """ 1960 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1964 sequence = sequence.split(
'.')[-1]
1967 harvestingList = sequence.split(
"+")
1968 from DQMOffline.Configuration.autoDQM
import autoDQM
1969 from Validation.Configuration.autoValidation
import autoValidation
1971 combined_mapping = copy.deepcopy( autoDQM )
1972 combined_mapping.update( autoValidation )
1973 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1975 if len(set(harvestingList))!=len(harvestingList):
1976 harvestingList=
list(set(harvestingList))
1977 print "Duplicate entries for HARVESTING, using",harvestingList
1979 for name
in harvestingList:
1980 if not name
in harvestingConfig.__dict__:
1981 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1983 harvestingstream = getattr(harvestingConfig,name)
1984 if isinstance(harvestingstream,cms.Path):
1985 self.schedule.append(harvestingstream)
1986 self.blacklist_paths.append(harvestingstream)
1987 if isinstance(harvestingstream,cms.Sequence):
1988 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1989 self.schedule.append(getattr(self.process,name+
"_step"))
1995 """ Enrich the process with AlCaHarvesting step """ 1997 sequence=sequence.split(
".")[-1]
2000 harvestingList = sequence.split(
"+")
2004 from Configuration.AlCa.autoPCL
import autoPCL
2007 for name
in harvestingConfig.__dict__:
2008 harvestingstream = getattr(harvestingConfig,name)
2009 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2010 self.schedule.append(harvestingstream)
2011 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2012 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2013 harvestingList.remove(name)
2015 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2016 self.schedule.append(lastStep)
2018 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2019 print "The following harvesting could not be found : ", harvestingList
2020 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)

    def build_production_info(self, evt_type, evtnumber):
        """ Add useful info for the production. """
        self.process.configurationMetadata = cms.untracked.PSet\
                                             (version=cms.untracked.string("$Revision: 1.19 $"),
                                              name=cms.untracked.string("Applications"),
                                              annotation=cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
                                              )

        self.addedObjects.append(("Production Info", "configurationMetadata"))
2046 """ Prepare the configuration string and add missing pieces.""" 2058 outputModuleCfgCode=
"" 2059 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2064 self.pythonCfgCode =
"# Auto generated configuration file\n" 2065 self.pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2066 self.pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2067 self.pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2068 if hasattr(self.
_options,
"era")
and self._options.era :
2069 self.pythonCfgCode +=
"from Configuration.StandardSequences.Eras import eras\n\n" 2070 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"'" 2072 for requestedEra
in self._options.era.split(
",") :
2073 self.pythonCfgCode +=
",eras."+requestedEra
2074 self.pythonCfgCode +=
")\n\n" 2076 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"')\n\n" 2078 self.pythonCfgCode +=
"# import of standard configurations\n" 2079 for module
in self.imports:
2080 self.pythonCfgCode += (
"process.load('"+module+
"')\n")
2083 if not hasattr(self.process,
"configurationMetadata"):
2087 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2089 self.pythonCfgCode +=
"\n" 2090 for comment,object
in self.addedObjects:
2092 self.pythonCfgCode +=
"\n# "+comment+
"\n" 2093 self.pythonCfgCode +=
dumpPython(self.process,object)
2096 self.pythonCfgCode +=
"\n# Output definition\n" 2097 self.pythonCfgCode += outputModuleCfgCode
2100 self.pythonCfgCode +=
"\n# Additional output definition\n" 2102 nl=self.additionalOutputs.keys()
2105 output = self.additionalOutputs[name]
2106 self.pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2107 tmpOut = cms.EndPath(output)
2108 setattr(self.process,name+
'OutPath',tmpOut)
2109 self.schedule.append(tmpOut)
2112 self.pythonCfgCode +=
"\n# Other statements\n" 2113 for command
in self.additionalCommands:
2114 self.pythonCfgCode += command +
"\n" 2117 for object
in self._options.inlineObjets.split(
','):
2120 if not hasattr(self.process,object):
2121 print 'cannot inline -'+object+
'- : not known' 2123 self.pythonCfgCode +=
'\n' 2124 self.pythonCfgCode +=
dumpPython(self.process,object)
2127 self.pythonCfgCode +=
"\n# Path and EndPath definitions\n" 2128 for path
in self.process.paths:
2129 if getattr(self.process,path)
not in self.blacklist_paths:
2130 self.pythonCfgCode +=
dumpPython(self.process,path)
2132 for endpath
in self.process.endpaths:
2133 if getattr(self.process,endpath)
not in self.blacklist_paths:
2134 self.pythonCfgCode +=
dumpPython(self.process,endpath)
2137 self.pythonCfgCode +=
"\n# Schedule definition\n" 2138 result =
"process.schedule = cms.Schedule(" 2141 self.process.schedule = cms.Schedule()
2142 for item
in self.schedule:
2143 if not isinstance(item, cms.Schedule):
2144 self.process.schedule.append(item)
2146 self.process.schedule.extend(item)
2148 if hasattr(self.process,
"HLTSchedule"):
2149 beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
2150 afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2151 pathNames = [
'process.'+p.label_()
for p
in beforeHLT]
2152 result +=
','.
join(pathNames)+
')\n' 2153 result +=
'process.schedule.extend(process.HLTSchedule)\n' 2154 pathNames = [
'process.'+p.label_()
for p
in afterHLT]
2155 result +=
'process.schedule.extend(['+
','.
join(pathNames)+
'])\n' 2157 pathNames = [
'process.'+p.label_()
for p
in self.schedule]
2158 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2160 self.pythonCfgCode += result
2162 for labelToAssociate
in self.labelsToAssociate:
2163 self.process.schedule.associate(getattr(self.process, labelToAssociate))
2164 self.pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2168 self.pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2169 self.pythonCfgCode+=
"associatePatAlgosToolsTask(process)\n" 2171 if self._options.nThreads
is not "1":
2172 self.pythonCfgCode +=
"\n" 2173 self.pythonCfgCode +=
"#Setup FWK for multithreaded\n" 2174 self.pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2175 self.pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2177 if self._options.isRepacked:
2178 self.pythonCfgCode +=
"\n" 2179 self.pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2180 self.pythonCfgCode +=
"MassReplaceInputTag(process)\n" 2181 MassReplaceInputTag(self.process)
2184 if self.productionFilterSequence:
2185 self.pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2186 self.pythonCfgCode +=
'for path in process.paths:\n' 2187 if len(self.conditionalPaths):
2188 self.pythonCfgCode +=
'\tif not path in %s: continue\n'%
str(self.conditionalPaths)
2189 if len(self.excludedPaths):
2190 self.pythonCfgCode +=
'\tif path in %s: continue\n'%
str(self.excludedPaths)
2191 self.pythonCfgCode +=
'\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2192 pfs = getattr(self.process,self.productionFilterSequence)
2193 for path
in self.process.paths:
2194 if not path
in self.conditionalPaths:
continue 2195 if path
in self.excludedPaths:
continue 2196 getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
2202 if self._options.runUnscheduled:
2205 self.pythonCfgCode+=
"#do not add changes to your config after this point (unless you know what you are doing)\n" 2206 self.pythonCfgCode+=
"from FWCore.ParameterSet.Utilities import convertToUnscheduled\n" 2207 self.pythonCfgCode+=
"process=convertToUnscheduled(process)\n" 2209 from FWCore.ParameterSet.Utilities
import convertToUnscheduled
2220 self.pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2221 self.pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2222 self.pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2223 self.pythonCfgCode +=
"# End adding early deletion\n" 2224 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2230 if self._options.io:
2232 if not self._options.io.endswith(
'.io'): self._option.io+=
'.io' 2233 io=open(self._options.io,
'w')
2235 if hasattr(self.process.source,
"fileNames"):
2236 if len(self.process.source.fileNames.value()):
2237 ioJson[
'primary']=self.process.source.fileNames.value()
2238 if hasattr(self.process.source,
"secondaryFileNames"):
2239 if len(self.process.source.secondaryFileNames.value()):
2240 ioJson[
'secondary']=self.process.source.secondaryFileNames.value()
2241 if self._options.pileup_input
and (self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:')):
2242 ioJson[
'pileup']=self._options.pileup_input[4:]
2243 for (o,om)
in self.process.outputModules_().
items():
2244 ioJson[o]=om.fileName.value()
2245 ioJson[
'GT']=self.process.GlobalTag.globaltag.value()
2246 if self.productionFilterSequence:
2247 ioJson[
'filter']=self.productionFilterSequence
2249 io.write(json.dumps(ioJson))
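
# Illustrative sketch only (not part of ConfigBuilder): a miniature version of the string
# assembly done by prepare().  The process name, era and module list are invented examples;
# the real method derives them from the command line options.
__example_version__ = "$Revision: 1.19 $"
name_ = 'RECO'
era = 'Run2_2017'
imports = ['Configuration.StandardSequences.Services_cff',
           'Configuration.EventContent.EventContent_cff']

cfg  = "# Auto generated configuration file\n"
cfg += "# using: \n# " + __example_version__[1:-1] + '\n'
cfg += "import FWCore.ParameterSet.Config as cms\n\n"
cfg += "from Configuration.StandardSequences.Eras import eras\n\n"
cfg += "process = cms.Process('" + name_ + "',eras." + era + ")\n\n"
cfg += "# import of standard configurations\n"
for module in imports:
    cfg += "process.load('" + module + "')\n"
print cfg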
Function index (as listed by the generated documentation for ConfigBuilder.py):

def load(self, includeFile)
def filesFromOption(self)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def doNotInlineEventContent(instance, label="cms.untracked.vstring(process."+theStreamType+"EventContent.outputCommands)")
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_DIGIPREMIX(self, sequence=None)
def prepare_RECO(self, sequence="reconstruction")
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def addCustomise(self, unsch=0)
def prepare_DIGIPREMIX_S2(self, sequence=None)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_SKIM(self, sequence="all")
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")