__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes

defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
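# Illustrative sketch (not part of the original module): how a caller such as
# cmsDriver.py typically consumes these defaults. The driver code below is an
# assumption; only the option names are taken from the defaults defined above.
#
#   import copy
#   from Configuration.Applications.ConfigBuilder import ConfigBuilder, defaultOptions
#
#   options = copy.deepcopy(defaultOptions)
#   options.isMC = True
#   options.isData = False
#   options.step = 'GEN,SIM'
#   options.conditions = 'auto:run2_mc'
#   options.number = 10
#   builder = ConfigBuilder(options, with_output=True, with_input=True)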
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    return "process."+name+" = " + theObject.dumpPython()+"\n"

    import FWCore.ParameterSet.Config as cms
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if not hasattr(s,"fileNames"):
        s.fileNames=cms.untracked.vstring(prim)
    else:
        s.fileNames.extend(prim)
    if not hasattr(s,"secondaryFileNames"):
        s.secondaryFileNames=cms.untracked.vstring(sec)
    else:
        s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print "found parent files:",sec
    import FWCore.ParameterSet.Config as cms

    print "the query is",query
    while eC!=0 and count<3:
        print 'Sleeping, then retrying DAS'
        p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        tupleP = os.waitpid(p.pid, 0)
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times - I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if not hasattr(s,"fileNames"):
        s.fileNames=cms.untracked.vstring(prim)
    else:
        s.fileNames.extend(prim)
    if not hasattr(s,"secondaryFileNames"):
        s.secondaryFileNames=cms.untracked.vstring(sec)
    else:
        s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    print "found parent files:",sec
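# Illustrative sketch of the shell command assembled above; the dataset name is a
# made-up example and <dasoption> stands for whatever --dasoption was passed:
#
#   das_client <dasoption> --query "file dataset = /RelValTTbar/CMSSW_X_Y_Z-.../GEN-SIM-RECO"
#
# Its stdout is expected to contain one logical file name (.root) per line, which
# the loop above collects into the primary/secondary file lists.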
def anyOf(listOfKeys,dict,opt=None):
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
190 """The main building routines """ 192 def __init__(self, options, process = None, with_output = False, with_input = False ):
193 """options taken from old cmsDriver and optparse """ 195 options.outfile_name = options.dirout+options.fileout
199 if self._options.isData
and options.isMC:
200 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
205 if 'ENDJOB' in self._options.step:
206 if (hasattr(self.
_options,
"outputDefinition")
and \
207 self._options.outputDefinition !=
'' and \
208 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
209 (hasattr(self.
_options,
"datatier")
and \
210 self._options.datatier
and \
211 'DQMIO' in self._options.datatier):
212 print "removing ENDJOB from steps since not compatible with DQMIO dataTier" 213 self._options.step=self._options.step.replace(
',ENDJOB',
'')
218 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
221 for step
in self._options.step.split(
","):
222 if step==
'':
continue 223 stepParts = step.split(
":")
224 stepName = stepParts[0]
225 if stepName
not in stepList
and not stepName.startswith(
're'):
226 raise ValueError(
"Step "+stepName+
" unknown")
227 if len(stepParts)==1:
228 self.stepMap[stepName]=
"" 229 elif len(stepParts)==2:
230 self.stepMap[stepName]=stepParts[1].
split(
'+')
231 elif len(stepParts)==3:
232 self.stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
234 raise ValueError(
"Step definition "+step+
" invalid")
235 self.stepKeys.append(stepName)
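        # Illustrative examples (not original comments) of --step strings and the
        # stepMap entries the loop above would build from them:
        #   'GEN,SIM'                            -> stepMap['GEN']=""  , stepMap['SIM']=""
        #   'RECO:reconstruction_trackingOnly'   -> stepMap['RECO']=['reconstruction_trackingOnly']
        #   'ALCA:TkAlMinBias+SiStripCalMinBias' -> stepMap['ALCA']=['TkAlMinBias','SiStripCalMinBias']
        #   'DQM:myCff:seqA+seqB'                -> stepMap['DQM']=(['seqA','seqB'],'myCff')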
        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.schedule = list()

        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.labelsToAssociate=[]
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
        self.excludedPaths=[]
        """Function to add the igprof profile service so that you can dump in the middle"""
        profileOpts = self._options.profile.split(':')
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        startEvent = profileOpts.pop(0)
        if not startEvent.isdigit():
            raise Exception("%s is not a number" % startEvent)
        profilerStart = int(startEvent)
        eventInterval = profileOpts.pop(0)
        if not eventInterval.isdigit():
            raise Exception("%s is not a number" % eventInterval)
        profilerInterval = int(eventInterval)
        profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
    def load(self,includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def executeAndRemember(self, command):
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))
        if self._options.profile:
            (start, interval, eventFormat, jobFormat) = self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

        """Here we decide how many evts will be processed"""
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
370 """Here the source is built. Priority: file, generator""" 371 self.addedObjects.append((
"Input source",
"source"))
374 for entry
in self._options.filein.split(
','):
376 if entry.startswith(
"filelist:"):
378 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
379 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
381 self.process.source.fileNames.append(self._options.dirin+entry)
382 if self._options.secondfilein:
383 if not hasattr(self.process.source,
"secondaryFileNames"):
384 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
385 for entry
in self._options.secondfilein.split(
','):
387 if entry.startswith(
"filelist:"):
388 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
389 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
390 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
392 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    args=self._options.filein.split(':')
                    print 'LHE input from article ',article
                    location='/store/lhe/'
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    print 'Issue to load LHE files, please check and try again.'
                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
            elif self._options.filetype == "DQMDAQ":
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options,"evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
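        # Illustrative examples (assumed) of the --filein forms handled in addSource above:
        #   'file:events.root'                         -> appended directly to fileNames
        #   'filelist:my_files.txt'                    -> expanded via filesFromList()
        #   'das:/RelValTTbar/CMSSW_X_Y_Z-.../GEN-SIM' -> expanded via filesFromDASQuery()
        #   'lhe:12345'                                -> LHE article listed with cmsLHEtoEOSManager.py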
495 """ Add output module to the process """ 497 if self._options.outputDefinition:
498 if self._options.datatier:
499 print "--datatier & --eventcontent options ignored" 502 outList = eval(self._options.outputDefinition)
503 for (id,outDefDict)
in enumerate(outList):
504 outDefDictStr=outDefDict.__str__()
505 if not isinstance(outDefDict,dict):
506 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
508 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
511 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
512 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
513 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
514 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
515 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
517 if not theModuleLabel:
518 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
519 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
520 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 522 for name
in tryNames:
523 if not hasattr(self.process,name):
526 if not theModuleLabel:
527 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
                if id==0:
                    defaultFileName=self._options.outfile_name
                else:
                    defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'

                if len(outDefDict.keys()):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")

                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName)))
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                if not hasattr(output,'SelectEvents'):
                    output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                for alca in self.AlCaPaths:
                    output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)

                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")

                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra output command in --output must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")

            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                output.dropMetaData = cms.untracked.string('ALL')
                output.fastCloning= cms.untracked.bool(False)
                output.overrideInputFileSplitLevels = cms.untracked.bool(True)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
        """Add selected standard sequences to the process"""
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
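        # Illustrative examples (assumed) of --pileup values handled above:
        #   'NoPileUp'               -> predefined key looked up in Mixing
        #   'file:myMixing_cff.py'   -> local cff loaded and inlined into the configuration
        #   'SomeScenario,{"B":(-3,3)}'
        #        -> named scenario with an extra override dict appended after the first comma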
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                self.loadAndRemember(self.SimGeometryCFF)
                if self.geometryDBLabel:
                    self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
        else:
            print "Geometry option",self._options.geometry,"unknown."

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)
        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")
        if self._options.restoreRNDSeeds!=False:
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
794 """Add conditions to the process""" 795 if not self._options.conditions:
return 797 if 'FrontierConditions_GlobalTag' in self._options.conditions:
798 print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line' 799 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
801 self.loadAndRemember(self.ConditionsDefaultCFF)
803 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
804 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
805 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
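        # Illustrative examples (assumed) of --conditions values passed to GlobalTag() above:
        #   'auto:run2_mc'                   -> symbolic key resolved via Configuration.AlCa.autoCond
        #   '80X_dataRun2_2016SeptRepro_v7'  -> explicit global tag name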
809 """Include the customise code """ 813 for c
in self._options.customisation_file:
814 custOpt.extend(c.split(
","))
816 for c
in self._options.customisation_file_unsch:
817 custOpt.extend(c.split(
","))
823 raise Exception(
"more than . in the specification:"+opt)
824 fileName=opt.split(
'.')[0]
825 if opt.count(
'.')==0: rest=
'customise' 827 rest=opt.split(
'.')[1]
828 if rest==
'py': rest=
'customise' 830 if fileName
in custMap:
831 custMap[fileName].extend(rest.split(
'+'))
833 custMap[fileName]=rest.split(
'+')
838 final_snippet=
'\n# customisation of the process.\n' 842 allFcn.extend(custMap[opt])
844 if allFcn.count(fcn)!=1:
845 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
849 packageName = f.replace(
".py",
"").
replace(
"/",
".")
850 __import__(packageName)
851 package = sys.modules[packageName]
854 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
856 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 857 if self._options.inline_custom:
858 for line
in file(customiseFile,
'r'): 859 if "import FWCore.ParameterSet.Config" in line:
861 final_snippet += line
863 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
864 for fcn
in custMap[f]:
865 print "customising the process with",fcn,
"from",f
866 if not hasattr(package,fcn):
868 raise Exception(
"config "+f+
" has no function "+fcn)
870 self.process=getattr(package,fcn)(self.process)
872 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
873 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
876 final_snippet +=
'\n# End of customisation functions\n' 882 final_snippet=
'\n# Customisation from command line\n' 883 if self._options.customise_commands:
885 for com
in self._options.customise_commands.split(
'\\n'):
886 com=string.lstrip(com)
888 final_snippet +=
'\n'+com
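        # Illustrative examples (assumed) of --customise specifications parsed above:
        #   'SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1'
        #        -> custMap['SLHCUpgradeSimulations/Configuration/postLS1Customs'] = ['customisePostLS1']
        #   'MyPkg/MyConfig/custom_cff'   (no '.', so the function name defaults to 'customise')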
        if len(self.stepMap):
            if self._options.particleTable not in defaultOptions.particleTableList:
                print 'Invalid particle table provided. Options are:'
                print defaultOptions.particleTable
            else:
                if len(self.stepMap):
                    self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')
        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
                self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"
            self.L1EMDefaultCFF="Configuration/StandardSequences/SimL1EmulatorPreMix_cff"

        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'
        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"
        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]

            geoms=self._options.geometry.split(',')
            if '/' in geoms[1] or '_cff' in geoms[1]:
                self.GeometryCFF=geoms[1]
            else:
                self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'
            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        if self._options.isData:
            self._options.pileup=None

        self.REDIGIDefaultSeq=self.DIGIDefaultSeq
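        # Illustrative examples (assumed) of --geometry values handled above:
        #   'DB:Extended'                   -> sim geometry from the DB, geometryDBLabel='Extended'
        #   'Extended2015,Extended2015Reco' -> expands to Configuration/Geometry/GeometryExtended2015_cff
        #                                      (sim) and .../GeometryExtended2015Reco_cff (reco)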
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers","full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess
        if ( len(sequence.split('.'))==1 ):
            self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"

        for i,s in enumerate(seq.split('*')):
            if i==0:
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
            else:
                p=getattr(self.process,prefix)
                p+=getattr(self.process, s)
        self.schedule.append(getattr(self.process,prefix))

        if self.nextScheduleIsConditional:
            self.conditionalPaths.append(prefix)
        setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
        self.schedule.append(getattr(self.process,prefix))

        for i,s in enumerate(seq.split('+')):
            setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process,sn))
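        # Illustrative examples (assumed) of the sequence specifications these helpers accept:
        #   'pgen'                                    -> default cff, sequence 'pgen'
        #   'SomePkg/SubPkg/MySequences_cff.mySeq'    -> explicit cff, sequence name after the '.'
        #   'seqA+seqB'                               -> split on '+' and scheduled as separate paths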
1228 """ Enrich the process with alca streams """ 1230 sequence = sequence.split(
'.')[-1]
1233 alcaList = sequence.split(
"+")
1235 from Configuration.AlCa.autoAlca
import autoAlca
1239 for name
in alcaConfig.__dict__:
1240 alcastream = getattr(alcaConfig,name)
1241 shortName = name.replace(
'ALCARECOStream',
'')
1242 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1243 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1244 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1245 self.AlCaPaths.append(shortName)
1246 if 'DQM' in alcaList:
1247 if not self._options.inlineEventContent
and hasattr(self.process,name):
1248 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1250 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1253 if self._options.hltProcess
or 'HLT' in self.stepMap:
1254 if isinstance(alcastream.paths,tuple):
1255 for path
in alcastream.paths:
1260 for i
in range(alcaList.count(shortName)):
1261 alcaList.remove(shortName)
1264 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1265 path = getattr(alcaConfig,name)
1266 self.schedule.append(path)
1267 alcaList.remove(
'DQM')
1269 if isinstance(alcastream,cms.Path):
1271 self.blacklist_paths.append(alcastream)
1274 if len(alcaList) != 0:
1276 for name
in alcaConfig.__dict__:
1277 alcastream = getattr(alcaConfig,name)
1278 if isinstance(alcastream,cms.FilteredStream):
1279 available.append(name.replace(
'ALCARECOStream',
''))
1280 print "The following alcas could not be found "+
str(alcaList)
1281 print "available ",available
1283 raise Exception(
"The following alcas could not be found "+
str(alcaList))
        loadFragment = self._options.evt_type.replace('.py','').replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1303 """ load the fragment of generator configuration """ 1308 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1310 if not '/' in loadFragment:
1311 loadFragment=
'Configuration.Generator.'+loadFragment
1313 loadFragment=loadFragment.replace(
'/',
'.')
1315 print "Loading generator fragment from",loadFragment
1316 __import__(loadFragment)
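        # Illustrative examples (assumed) of how an --evt_type value is mapped to an
        # importable generator fragment by the string manipulation above:
        #   'SingleMuPt10_pythia8_cfi'       -> 'Configuration.Generator.SingleMuPt10_pythia8_cfi'
        #   'MyPkg/python/MyFragment_cfi.py' -> 'MyPkg.MyFragment_cfi'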
        if not (self._options.filein or self._options.dasquery):
            raise Exception("Neither gen fragment nor input files provided: this is an inconsistent GEN step configuration")

        generatorModule=sys.modules[loadFragment]
        genModules=generatorModule.__dict__

        if self.LHEDefaultSeq in genModules:
            del genModules[self.LHEDefaultSeq]

        if self._options.hideGen:
            self.loadAndRemember(loadFragment)
        else:
            self.process.load(loadFragment)
            import FWCore.ParameterSet.Modules as cmstypes
            for name in genModules:
                theObject = getattr(generatorModule,name)
                if isinstance(theObject, cmstypes._Module):
                    self._options.inlineObjets=name+','+self._options.inlineObjets
                elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                    self._options.inlineObjets+=','+name

        if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
            if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                self.productionFilterSequence = 'ProductionFilterSequence'
            elif 'generator' in genModules:
                self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
        genSeqName=sequence.split('.')[-1]

        from Configuration.StandardSequences.VtxSmeared import VtxSmeared
        cffToBeLoaded=VtxSmeared[self._options.beamspot]
        self.loadAndRemember(cffToBeLoaded)

        raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

        if self._options.scenario == 'HeavyIons':
            if self._options.pileup=='HiMixGEN':
                self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
            else:
                self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
        self.schedule.append(self.process.generation_step)

        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:
1378 """ Enrich the schedule with the summary of the filter step """ 1380 self.loadAndRemember(
"GeneratorInterface/Core/genFilterSummary_cff")
1381 self.scheduleSequenceAtEnd(
'genFilterSummary',
'genfiltersummary_step')
1385 """ Enrich the schedule with the simulation step""" 1386 self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
1387 if not self._options.fast:
1388 if self._options.gflash==
True:
1389 self.loadAndRemember(
"Configuration/StandardSequences/GFlashSIM_cff")
1391 if self._options.magField==
'0T':
1392 self.executeAndRemember(
"process.g4SimHits.UseMagneticField = cms.bool(False)")
1394 if self._options.magField==
'0T':
1395 self.executeAndRemember(
"process.famosSimHits.UseMagneticField = cms.bool(False)")
1397 self.scheduleSequence(sequence.split(
'.')[-1],
'simulation_step')
1401 """ Enrich the schedule with the digitisation step""" 1404 if self._options.gflash==
True:
1405 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1407 if sequence ==
'pdigi_valid':
1408 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1410 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and not self.process.source.type_()==
'EmptySource':
1411 if self._options.inputEventContent==
'':
1412 self._options.inputEventContent=
'REGEN' 1414 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1421 """ Enrich the schedule with the digitisation step""" 1426 if sequence ==
'pdigi_valid':
1427 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
1429 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")
1435 """ Enrich the schedule with the digitisation step""" 1436 self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
1438 self.loadAndRemember(
"SimGeneral/MixingModule/digi_MixPreMix_cfi")
1441 if sequence ==
'pdigi_valid':
1442 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
1444 self.executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")
1446 self.scheduleSequence(sequence.split(
'.')[-1],
'digitisation_step')
1450 """ Enrich the schedule with the crossing frame writer step""" 1456 """ Enrich the schedule with the digitisation step""" 1460 if self._options.pileup_input:
1462 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1463 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1464 elif self._options.pileup_input.startswith(
"filelist:"):
1465 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1467 theFiles=self._options.pileup_input.split(
',')
1469 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1476 if "DIGIPREMIX" in self.stepMap.keys():
1478 self.
executeAndRemember(
"process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
1488 """ Enrich the schedule with the L1 simulation step""" 1489 assert(sequence ==
None)
1495 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1496 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT']
1497 if sequence
in supported:
1498 self.loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1499 if self._options.scenario ==
'HeavyIons':
1500 self.renameInputTagsInSequence(
"SimL1Emulator",
"rawDataCollector",
"rawDataRepacker")
1501 self.scheduleSequence(
'SimL1Emulator',
'L1RePack_step')
1503 print "L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported
1508 """ Enrich the schedule with the HLT simulation step""" 1510 print "no specification of the hlt menu has been given, should never happen" 1511 raise Exception(
'no HLT sequence provided')
1515 from Configuration.HLT.autoHLT
import autoHLT
1518 sequence = autoHLT[key]
1520 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1526 if self._options.scenario ==
'HeavyIons':
1527 optionsForHLT[
'type'] =
'HIon' 1529 optionsForHLT[
'type'] =
'GRun' 1530 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.iteritems())
1531 if sequence ==
'run,fromSource':
1532 if hasattr(self.process.source,
'firstRun'):
1533 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1534 elif hasattr(self.process.source,
'setRunNumber'):
1535 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1537 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1539 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1543 if self._options.isMC:
1544 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1546 if self._options.name !=
'HLT':
1547 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1548 self.additionalCommands.append(
'process = ProcessName(process)')
1549 self.additionalCommands.append(
'')
1550 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1553 self.schedule.append(self.process.HLTSchedule)
1554 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1557 if self._options.fast:
1558 if not hasattr(self.process,
'HLTEndSequence'):
1559 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
        seqReco=sequence.split(',')[1]
        seqDigi=sequence.split(',')[0]
        print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
        ''' Enrich the schedule with L1 HW validation '''
        print '\n\n\n DEPRECATED this has no action \n\n\n'

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with a user defined filter sequence '''
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]

        label=visitee.label()

        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True

        self.productionFilterSequence = filterSeq
        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
            if self._options.fast:
                self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\""
"\"" 1675 ''' Enrich the schedule with event interpretation ''' 1676 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1677 if sequence
in EventInterpretation:
1678 self.EIDefaultCFF = EventInterpretation[sequence]
1679 sequence =
'EIsequence' 1681 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1682 self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1683 self.scheduleSequence(sequence.split(
'.')[-1],
'eventinterpretaion_step')
        ''' Enrich the schedule with skimming fragments'''
        sequence = sequence.split('.')[-1]

        skimlist=sequence.split('+')
        from Configuration.Skimming.autoSkim import autoSkim

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(skim,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(skim,skimstream)
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
        ''' Enrich the schedule with a user defined sequence '''

        """ Enrich the schedule with the postreco step """

        print sequence,"in preparing validation"

        from Validation.Configuration.autoValidation import autoValidation
        sequence=sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName=sequence.split(',')[0].split('+')
            valSeqName=sequence.split(',')[1].split('+')
        else:
            prevalSeqName=sequence.split('+')
            valSeqName=sequence.split('+')

        postfix='_'+sequence
        prevalSeqName=['prevalidation'+postfix]
        valSeqName=['validation'+postfix]
        if not hasattr(self.process,valSeqName[0]):
            valSeqName=[sequence]

        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:

        for (i,s) in enumerate(prevalSeqName):
            setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
            self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds=True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
1815 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1816 It will climb down within PSets, VPSets and VInputTags to find its target""" 1817 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1824 if isinstance(pset, cms._Parameterizable):
1825 for name
in pset.parameters_().
keys():
1831 value = getattr(pset,name)
1832 type = value.pythonTypeName()
1833 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1834 self.
doIt(value,base+
"."+name)
1835 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1836 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1837 elif type
in (
'cms.string',
'cms.untracked.string'):
1839 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1841 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1842 for (i,n)
in enumerate(value):
1843 if not isinstance(n, cms.InputTag):
1847 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1850 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1851 for (i,n)
in enumerate(value):
1854 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1856 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1857 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1862 label = visitee.label()
1863 except AttributeError:
1864 label =
'<Module not in a Process>' 1866 label =
'other execption' 1867 self.
doIt(visitee, label)
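The visiting logic above is just a recursive descent over nested parameter containers. A minimal, self-contained sketch of the same idea, assuming plain dictionaries and lists stand in for cms.PSet/cms.VPSet/cms.VInputTag (all names below are made up for illustration):

# Illustration only: a stripped-down analogue of the visitor above, using plain
# dictionaries instead of cms parameter sets, to show the recursive descent and
# the in-place replacement of a "processName" field.
def replace_process_name(node, search, replace, path="process", verbose=False):
    if isinstance(node, dict):                      # analogue of cms._Parameterizable
        for name, value in node.items():
            where = "%s.%s" % (path, name)
            if isinstance(value, (dict, list)):
                replace_process_name(value, search, replace, where, verbose)
            elif name == "processName" and value == search:
                if verbose:
                    print "set process name %s %s ==> %s" % (where, value, replace)
                node[name] = replace
    elif isinstance(node, list):                    # analogue of cms.VPSet / cms.VInputTag
        for i, item in enumerate(node):
            replace_process_name(item, search, replace, "%s[%d]" % (path, i), verbose)

# Example: rename the HLT process everywhere inside a nested parameter set.
module = {"triggerResults": {"processName": "HLT"},
          "inputs": [{"processName": "HLT"}, {"processName": "RECO"}]}
replace_process_name(module, "HLT", "reHLT", verbose=True)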
1874 print "Replacing all InputTag %s => %s"%(oldT,newT)
1877 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1878 if not loadMe
in self.additionalCommands:
1879 self.additionalCommands.append(loadMe)
1880 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1884 if self._options.hltProcess:
1885 proc=self._options.hltProcess
1887 proc=self.process.name_()
1888 if proc==HLTprocess:
return 1890 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1892 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1893 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1894 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self, seqList, mapping, index=None):
        # ... (the level counter and maxLevel bound are set up here; lines elided)
        while '@' in repr(seqList) and level < maxLevel:
            # ...
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location = specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map " + location + " from " + repr(mapping))
                    mappedTo = mapping[location]
                    if index is not None:
                        mappedTo = mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    # ...
        if level == maxLevel:
            raise Exception("Could not fully expand " + repr(seqList) + " from " + repr(mapping))
    def prepare_DQM(self, sequence='DQMOffline'):
        # ...
        sequenceList = sequence.split('.')[-1].split('+')
        postSequenceList = sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # ...
        if len(set(sequenceList)) != len(sequenceList):
            sequenceList = list(set(sequenceList))
            print "Duplicate entries for DQM:, using", sequenceList

        pathName = 'dqmoffline_step'
        for (i, sequence) in enumerate(sequenceList):
            # ...
            pathName = 'dqmoffline_%d_step' % (i)
            # ...
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                # ...
            setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
            self.schedule.append(getattr(self.process, pathName))
            # ...
            if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                # put the filtering path in the schedule
                getattr(self.process, pathName).insert(0, self.process.genstepfilter)

        pathName = 'dqmofflineOnPAT_step'
        for (i, sequence) in enumerate(postSequenceList):
            # ...
            pathName = 'dqmofflineOnPAT_%d_step' % (i)
            # ...
            setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
            self.schedule.append(getattr(self.process, pathName))
1954 """ Enrich the process with harvesting step """ 1955 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1959 sequence = sequence.split(
'.')[-1]
1962 harvestingList = sequence.split(
"+")
1963 from DQMOffline.Configuration.autoDQM
import autoDQM
1964 from Validation.Configuration.autoValidation
import autoValidation
1966 combined_mapping = copy.deepcopy( autoDQM )
1967 combined_mapping.update( autoValidation )
1968 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1970 if len(set(harvestingList))!=len(harvestingList):
1971 harvestingList=
list(set(harvestingList))
1972 print "Duplicate entries for HARVESTING, using",harvestingList
1974 for name
in harvestingList:
1975 if not name
in harvestingConfig.__dict__:
1976 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1978 harvestingstream = getattr(harvestingConfig,name)
1979 if isinstance(harvestingstream,cms.Path):
1980 self.schedule.append(harvestingstream)
1981 self.blacklist_paths.append(harvestingstream)
1982 if isinstance(harvestingstream,cms.Sequence):
1983 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1984 self.schedule.append(getattr(self.process,name+
"_step"))
1990 """ Enrich the process with AlCaHarvesting step """ 1992 sequence=sequence.split(
".")[-1]
1995 harvestingList = sequence.split(
"+")
1999 from Configuration.AlCa.autoPCL
import autoPCL
2002 for name
in harvestingConfig.__dict__:
2003 harvestingstream = getattr(harvestingConfig,name)
2004 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2005 self.schedule.append(harvestingstream)
2006 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2007 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2008 harvestingList.remove(name)
2010 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2011 self.schedule.append(lastStep)
2013 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2014 print "The following harvesting could not be found : ", harvestingList
2015 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)

    def build_production_info(self, evt_type, evtnumber):
        """ Add useful info for the production. """
        self.process.configurationMetadata = cms.untracked.PSet(
            version=cms.untracked.string("$Revision: 1.19 $"),
            name=cms.untracked.string("Applications"),
            annotation=cms.untracked.string(evt_type + " nevts:" + str(evtnumber))
            )
        self.addedObjects.append(("Production Info", "configurationMetadata"))
2041 """ Prepare the configuration string and add missing pieces.""" 2053 outputModuleCfgCode=
"" 2054 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2059 self.pythonCfgCode =
"# Auto generated configuration file\n" 2060 self.pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2061 self.pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2062 self.pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2063 if hasattr(self.
_options,
"era")
and self._options.era :
2064 self.pythonCfgCode +=
"from Configuration.StandardSequences.Eras import eras\n\n" 2065 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"'" 2067 for requestedEra
in self._options.era.split(
",") :
2068 self.pythonCfgCode +=
",eras."+requestedEra
2069 self.pythonCfgCode +=
")\n\n" 2071 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"')\n\n" 2073 self.pythonCfgCode +=
"# import of standard configurations\n" 2074 for module
in self.imports:
2075 self.pythonCfgCode += (
"process.load('"+module+
"')\n")
2078 if not hasattr(self.process,
"configurationMetadata"):
2082 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2084 self.pythonCfgCode +=
"\n" 2085 for comment,object
in self.addedObjects:
2087 self.pythonCfgCode +=
"\n# "+comment+
"\n" 2088 self.pythonCfgCode +=
dumpPython(self.process,object)
        # output definition
        self.pythonCfgCode += "\n# Output definition\n"
        self.pythonCfgCode += outputModuleCfgCode

        # additional output definitions
        self.pythonCfgCode += "\n# Additional output definition\n"
        # ...
        nl = self.additionalOutputs.keys()
        # ...
        for name in nl:
            output = self.additionalOutputs[name]
            self.pythonCfgCode += "process.%s = %s" % (name, output.dumpPython())
            tmpOut = cms.EndPath(output)
            setattr(self.process, name + 'OutPath', tmpOut)
            self.schedule.append(tmpOut)

        # replay all remembered commands
        self.pythonCfgCode += "\n# Other statements\n"
        for command in self.additionalCommands:
            self.pythonCfgCode += command + "\n"

        # inline objects requested on the command line
        for object in self._options.inlineObjets.split(','):
            # ...
            if not hasattr(self.process, object):
                print 'cannot inline -' + object + '- : not known'
            else:
                self.pythonCfgCode += '\n'
                self.pythonCfgCode += dumpPython(self.process, object)

        # dump all paths and endpaths that are not blacklisted
        self.pythonCfgCode += "\n# Path and EndPath definitions\n"
        for path in self.process.paths:
            if getattr(self.process, path) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process, path)
        for endpath in self.process.endpaths:
            if getattr(self.process, endpath) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process, endpath)
        # schedule definition
        self.pythonCfgCode += "\n# Schedule definition\n"
        result = "process.schedule = cms.Schedule("

        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process, "HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule) + 1:]
            pathNames = ['process.' + p.label_() for p in beforeHLT]
            result += ','.join(pathNames) + ')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.' + p.label_() for p in afterHLT]
            result += 'process.schedule.extend([' + ','.join(pathNames) + '])\n'
        else:
            pathNames = ['process.' + p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule(' + ','.join(pathNames) + ')\n'

        self.pythonCfgCode += result

        for labelToAssociate in self.labelsToAssociate:
            self.process.schedule.associate(getattr(self.process, labelToAssociate))
            self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
        # ...
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
        self.pythonCfgCode += "associatePatAlgosToolsTask(process)\n"
is not "1":
2167 self.pythonCfgCode +=
"\n" 2168 self.pythonCfgCode +=
"#Setup FWK for multithreaded\n" 2169 self.pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2170 self.pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2172 if self._options.isRepacked:
2173 self.pythonCfgCode +=
"\n" 2174 self.pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2175 self.pythonCfgCode +=
"MassReplaceInputTag(process)\n" 2176 MassReplaceInputTag(self.process)
2179 if self.productionFilterSequence:
2180 self.pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2181 self.pythonCfgCode +=
'for path in process.paths:\n' 2182 if len(self.conditionalPaths):
2183 self.pythonCfgCode +=
'\tif not path in %s: continue\n'%
str(self.conditionalPaths)
2184 if len(self.excludedPaths):
2185 self.pythonCfgCode +=
'\tif path in %s: continue\n'%
str(self.excludedPaths)
2186 self.pythonCfgCode +=
'\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
2187 pfs = getattr(self.process,self.productionFilterSequence)
2188 for path
in self.process.paths:
2189 if not path
in self.conditionalPaths:
continue 2190 if path
in self.excludedPaths:
continue 2191 getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
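For reference, with productionFilterSequence set to a hypothetical 'ProductionFilterSequence' and empty conditionalPaths/excludedPaths lists, the snippet written into the generated configuration by the string concatenations above is simply:

# Hypothetical snippet appearing in the dumped configuration
# filter all path with the production filter sequence
for path in process.paths:
    getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq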
        if self._options.runUnscheduled:
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            # ...
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            # ...

        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        self.pythonCfgCode += "# End adding early deletion\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        # ...
        # dump a .io file with the inputs/outputs of the job, if requested
        if self._options.io:
            # ...
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io = open(self._options.io, 'w')
            ioJson = {}
            if hasattr(self.process.source, "fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source, "secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o, om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter'] = self.productionFilterSequence
            # ...
            io.write(json.dumps(ioJson))
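The .io file is therefore a small JSON summary of the job's inputs and outputs. A hypothetical example, pretty-printed for readability (all file names, dataset and GT values are made up), might look like:

# Hypothetical content of the emitted .io file
# {"primary": ["/store/data/Run2017B/SingleMuon/RAW/v1/000/297/050/file.root"],
#  "pileup": "/MinBias_TuneCP5_13TeV-pythia8/RunIIFall17/GEN-SIM",
#  "RECOoutput": "output.root",
#  "GT": "auto:run2_data",
#  "filter": "ProductionFilterSequence"}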