__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
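# Illustrative usage sketch (assumptions: the attribute names are the ones set above;
# the event type, conditions and beamspot values are placeholders, not defaults).
# A driver script would typically copy these defaults, override a few fields and hand
# the result to the ConfigBuilder class defined further below:
#
#   import copy
#   opts = copy.deepcopy(defaultOptions)
#   opts.isMC = True
#   opts.isData = False
#   opts.evt_type = 'SomeGenFragment_cfi'      # placeholder fragment name
#   opts.step = 'GEN,SIM'
#   opts.conditions = 'auto:run2_mc'           # placeholder GlobalTag key
#   opts.beamspot = 'NominalCollision'         # placeholder vertex-smearing key
#   builder = ConfigBuilder(opts, with_output=True, with_input=True)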
def dumpPython(process,name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"

def filesFromList(fileName,s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    if len(prim)==0:
        raise Exception("There are not files in input from the file list")
    if len(sec)!=0:
        print "found parent files:",sec
    return (prim,sec)
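# Illustrative sketch of the file-list format consumed above: one primary file per line,
# optionally followed by its parent/secondary file on the same line (paths are placeholders):
#
#   /store/sample/step2_1.root /store/sample/raw_1.root
#   /store/sample/step2_2.root
#
# Typical calls:
#   prim, sec = filesFromList("files.txt")         # just collect the names
#   filesFromList("files.txt", process.source)     # also fill an existing PoolSource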
def filesFromDASQuery(query,option='',s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print "the query is",query
    while eC!=0 and count<3:
        print 'Sleeping, then retrying DAS'
        p = Popen('das_client %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        tupleP = os.waitpid(p.pid, 0)
    if eC==0:
        print "DAS succeeded after",count,"attempts",eC
    else:
        print "DAS failed 3 times- I give up"
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if not hasattr(s,"secondaryFileNames"):
            s.secondaryFileNames=cms.untracked.vstring(sec)
        else:
            s.secondaryFileNames.extend(sec)
    print "found files: ",prim
    print "found parent files:",sec
    return (prim,sec)
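# Illustrative sketch: addSource() below uses this helper for "das:"/"dbs:" inputs by
# first creating an empty PoolSource and letting the DAS query fill it (the dataset
# name is a placeholder; any valid "file dataset = ..." query works):
#
#   process.source = cms.Source("PoolSource",
#                               fileNames = cms.untracked.vstring(),
#                               secondaryFileNames = cms.untracked.vstring())
#   filesFromDASQuery('file dataset = /SomeDataset/SomeEra/GEN-SIM-RECO', '', process.source)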
    for s in aProcess.paths_().keys():
    for s in aProcess.endpaths_().keys():

def anyOf(listOfKeys,dict,opt=None):
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
class ConfigBuilder(object):
    """The main building routines """

    def __init__(self, options, process = None, with_output = False, with_input = False ):
        """options taken from old cmsDriver and optparse """

        options.outfile_name = options.dirout+options.fileout

        if self._options.isData and options.isMC:
            raise Exception("ERROR: You may specify only --data or --mc, not both")

        if 'ENDJOB' in self._options.step:
            if (hasattr(self._options,"outputDefinition") and \
                self._options.outputDefinition != '' and \
                any(anyOf(['t','tier','dataTier'],outdic) == 'DQMIO' for outdic in eval(self._options.outputDefinition))) or \
               (hasattr(self._options,"datatier") and \
                self._options.datatier and \
                'DQMIO' in self._options.datatier):
                print "removing ENDJOB from steps since not compatible with DQMIO dataTier"
                self._options.step=self._options.step.replace(',ENDJOB','')

        stepList = [re.sub(r'^prepare_', '', methodName) for methodName in ConfigBuilder.__dict__ if methodName.startswith('prepare_')]
        for step in self._options.step.split(","):
            if step=='': continue
            stepParts = step.split(":")
            stepName = stepParts[0]
            if stepName not in stepList and not stepName.startswith('re'):
                raise ValueError("Step "+stepName+" unknown")
            if len(stepParts)==1:
                self.stepMap[stepName]=""
            elif len(stepParts)==2:
                self.stepMap[stepName]=stepParts[1].split('+')
            elif len(stepParts)==3:
                self.stepMap[stepName]=(stepParts[2].split('+'),stepParts[1])
            else:
                raise ValueError("Step definition "+step+" invalid")
            self.stepKeys.append(stepName)
        self.with_output = with_output
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
        self.with_input = with_input
        if process == None:
            self.process = cms.Process(self._options.name)
        else:
            self.process = process
        self.importsUnsch = []
        self.schedule = list()

        self.additionalCommands = []
        self.blacklist_paths = []
        self.addedObjects = []
        self.additionalOutputs = {}

        self.productionFilterSequence = None
        self.nextScheduleIsConditional=False
        self.conditionalPaths=[]
        self.excludedPaths=[]
        """Function to add the igprof profile service so that you can dump in the middle"""
        profileOpts = self._options.profile.split(':')
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        startEvent = profileOpts.pop(0)
        if not startEvent.isdigit():
            raise Exception("%s is not a number" % startEvent)
        profilerStart = int(startEvent)
        eventInterval = profileOpts.pop(0)
        if not eventInterval.isdigit():
            raise Exception("%s is not a number" % eventInterval)
        profilerInterval = int(eventInterval)
        profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace("_cfi",""),
                                                                       self._options.pileup,
                                                                       self._options.conditions,
                                                                       self._options.datatier,
                                                                       self._options.profileTypeLabel)
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
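        # Illustrative sketch: the --profile option parsed above is a colon-separated
        # "start:interval:format" specification, e.g.
        #
        #   --profile 1:100:myjob___%I.gz
        #
        # giving profilerStart=1, profilerInterval=100 and an explicit dump-file pattern;
        # when the format is omitted, one is assembled from evt_type, pileup, conditions,
        # datatier and profileTypeLabel as shown above.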
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

        self.importsUnsch.append(includeFile)
        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
        else:
            self.process.options = cms.untracked.PSet( )

        if self._options.runUnscheduled:
            self.process.options.allowUnscheduled=cms.untracked.bool(True)

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            (start, interval, eventFormat, jobFormat) = self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
        """Here we decide how many evts will be processed"""
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
        """Here the source is built. Priority: file, generator"""
        self.addedObjects.append(("Input source","source"))

        for entry in self._options.filein.split(','):
            if entry.startswith("filelist:"):
                filesFromList(entry[9:],self.process.source)
            elif entry.startswith("dbs:") or entry.startswith("das:"):
                filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
            else:
                self.process.source.fileNames.append(self._options.dirin+entry)
        if self._options.secondfilein:
            if not hasattr(self.process.source,"secondaryFileNames"):
                raise Exception("--secondfilein not compatible with "+self._options.filetype+"input type")
            for entry in self._options.secondfilein.split(','):
                if entry.startswith("filelist:"):
                    self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
                elif entry.startswith("dbs:") or entry.startswith("das:"):
                    self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
                else:
                    self.process.source.secondaryFileNames.append(self._options.dirin+entry)

        if self._options.filein or self._options.dasquery:
            if self._options.filetype == "EDM":
                self.process.source=cms.Source("PoolSource",
                                               fileNames = cms.untracked.vstring(),
                                               secondaryFileNames= cms.untracked.vstring())
            elif self._options.filetype == "DAT":
                self.process.source=cms.Source("NewEventStreamFileReader",fileNames = cms.untracked.vstring())
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    args=self._options.filein.split(':')
                    print 'LHE input from article ',article
                    location='/store/lhe/'
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    print 'Issue to load LHE files, please check and try again.'
                    self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
            elif self._options.filetype == "DQMDAQ":
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options,"evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if type(RunsAndWeights[self._options.runsScenarioForMC])==str:
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
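        # Illustrative sketch: the --output definition handled in addOutput() below is a
        # python list of dicts, each decoded with anyOf() so that several spellings of a
        # key are accepted and consumed as they are read, e.g.
        #
        #   --output '[{"e":"AODSIM","t":"AODSIM"},{"e":"DQM","t":"DQMIO"}]'
        #
        #   d = {'e':'AODSIM','t':'AODSIM'}
        #   tier = anyOf(['t','tier','dataTier'], d)                           # 'AODSIM'
        #   content = anyOf(['e','ec','eventContent','streamType'], d, tier)   # 'AODSIM'
        #   # keys are popped as they are read, so any leftover can be reported as unused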
        """ Add output module to the process """
        if self._options.outputDefinition:
            if self._options.datatier:
                print "--datatier & --eventcontent options ignored"

            outList = eval(self._options.outputDefinition)
            for (id,outDefDict) in enumerate(outList):
                outDefDictStr=outDefDict.__str__()
                if not isinstance(outDefDict,dict):
                    raise Exception("--output needs to be passed a list of dict"+self._options.outputDefinition+" is invalid")

                theTier=anyOf(['t','tier','dataTier'],outDefDict)
                theStreamType=anyOf(['e','ec','eventContent','streamType'],outDefDict,theTier)
                theFilterName=anyOf(['f','ftN','filterName'],outDefDict,'')
                theSelectEvent=anyOf(['s','sE','selectEvents'],outDefDict,'')
                theModuleLabel=anyOf(['l','mL','moduleLabel'],outDefDict,'')
                theExtraOutputCommands=anyOf(['o','oC','outputCommands'],outDefDict,'')

                if not theModuleLabel:
                    tryNames=[theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+'output',
                              theStreamType.replace(theTier.replace('-',''),'')+theTier.replace('-','')+theFilterName+theSelectEvent.split(',')[0].replace(':','for').replace(' ','')+'output'
                              ]
                    for name in tryNames:
                        if not hasattr(self.process,name):
                            theModuleLabel=name
                            break
                if not theModuleLabel:
                    raise Exception("cannot find a module label for specification: "+outDefDictStr)

                defaultFileName=self._options.outfile_name
                defaultFileName=self._options.outfile_name.replace('.root','_in'+theTier+'.root')

                theFileName=self._options.dirout+anyOf(['fn','fileName'],outDefDict,defaultFileName)
                if not theFileName.endswith('.root'):
                    theFileName+='.root'
                if len(outDefDict.keys()):
                    raise Exception("unused keys from --output options: "+','.join(outDefDict.keys()))
                if theStreamType=='DQMIO': theStreamType='DQM'
                if theStreamType=='ALL':
                    theEventContent = cms.PSet(outputCommands = cms.untracked.vstring('keep *'))
                else:
                    theEventContent = getattr(self.process, theStreamType+"EventContent")

                if theStreamType=='ALCARECO' and not theFilterName:
                    theFilterName='StreamALCACombined'

                CppType='PoolOutputModule'
                if self._options.timeoutOutput:
                    CppType='TimeoutPoolOutputModule'
                if theStreamType=='DQM' and theTier=='DQMIO': CppType='DQMRootOutputModule'
                output = cms.OutputModule(CppType,
                                          theEventContent.clone(),
                                          fileName = cms.untracked.string(theFileName),
                                          dataset = cms.untracked.PSet(
                                              dataTier = cms.untracked.string(theTier),
                                              filterName = cms.untracked.string(theFilterName)))
                if not theSelectEvent and hasattr(self.process,'generation_step') and theStreamType!='LHE':
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
                if not theSelectEvent and hasattr(self.process,'filtering_step'):
                    output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))
                if theSelectEvent:
                    output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))

                    if not hasattr(output,'SelectEvents'):
                        output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
                    for alca in self.AlCaPaths:
                        output.SelectEvents.SelectEvents.extend(getattr(self.process,'OutALCARECO'+alca).SelectEvents.SelectEvents)

                if hasattr(self.process,theModuleLabel):
                    raise Exception("the current process already has a module "+theModuleLabel+" defined")
                setattr(self.process,theModuleLabel,output)
                outputModule=getattr(self.process,theModuleLabel)
                setattr(self.process,theModuleLabel+'_step',cms.EndPath(outputModule))
                path=getattr(self.process,theModuleLabel+'_step')
                self.schedule.append(path)

                if not self._options.inlineEventContent and hasattr(self.process,theStreamType+"EventContent"):
                    outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
                if theExtraOutputCommands:
                    if not isinstance(theExtraOutputCommands,list):
                        raise Exception("extra ouput command in --option must be a list of strings")
                    if hasattr(self.process,theStreamType+"EventContent"):
                        self.executeAndRemember('process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
                    else:
                        outputModule.outputCommands.extend(theExtraOutputCommands)

                result+="\nprocess."+theModuleLabel+" = "+outputModule.dumpPython()

        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            theEventContent = getattr(self.process, streamType+"EventContent")

            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            output = cms.OutputModule(CppType,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)))
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                output.dropMetaData = cms.untracked.string('ALL')
                output.fastCloning= cms.untracked.bool(False)
                output.overrideInputFileSplitLevels = cms.untracked.bool(True)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
        """ Add selected standard sequences to the process """
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                self.process.load(mixingDict['file'])
                print "inlining mixing module configuration"
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
        if len(self.stepMap):
            self.loadAndRemember(self.GeometryCFF)
            if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                self.loadAndRemember(self.SimGeometryCFF)
                if self.geometryDBLabel:
                    self.executeAndRemember('process.XMLFromDBSource.label = cms.string("%s")'%(self.geometryDBLabel))
        print "Geometry option",self._options.geometry,"unknown."

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print "Step:", stepName,"Spec:",stepSpec
            if stepName.startswith('re'):
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif type(stepSpec)==list:
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif type(stepSpec)==tuple:
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
        if self._options.inputEventContent or self._options.inputCommands:
            if self._options.inputCommands:
                self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
            else:
                self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

            if self._options.inputEventContent:
                def dropSecondDropStar(iec):

                if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
                for evct in self._options.inputEventContent.split(','):
                    if evct=='': continue
                    theEventContent = getattr(self.process, evct+"EventContent")
                    if hasattr(theEventContent,'outputCommands'):
                        self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                    if hasattr(theEventContent,'inputCommands'):
                        self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

                dropSecondDropStar(self.process.source.inputCommands)

                if not self._options.dropDescendant:
                    self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
        """Add conditions to the process"""
        if not self._options.conditions:
            return

        if 'FrontierConditions_GlobalTag' in self._options.conditions:
            print 'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line'
            self._options.conditions = self._options.conditions.replace("FrontierConditions_GlobalTag,",'')

        self.loadAndRemember(self.ConditionsDefaultCFF)
        from Configuration.AlCa.GlobalTag import GlobalTag
        self.process.GlobalTag = GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
        self.additionalCommands.append('from Configuration.AlCa.GlobalTag import GlobalTag')
        self.additionalCommands.append('process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
        """Include the customise code """

        for c in self._options.customisation_file:
            custOpt.extend(c.split(","))
        for c in self._options.customisation_file_unsch:
            custOpt.extend(c.split(","))

        if opt.count('.')>1:
            raise Exception("more than . in the specification:"+opt)
        fileName=opt.split('.')[0]
        if opt.count('.')==0: rest='customise'
        else:
            rest=opt.split('.')[1]
            if rest=='py': rest='customise'

        if fileName in custMap:
            custMap[fileName].extend(rest.split('+'))
        else:
            custMap[fileName]=rest.split('+')

        final_snippet='\n# customisation of the process.\n'

        allFcn.extend(custMap[opt])
        if allFcn.count(fcn)!=1:
            raise Exception("cannot specify twice "+fcn+" as a customisation method")

        packageName = f.replace(".py","").replace("/",".")
        __import__(packageName)
        package = sys.modules[packageName]

        customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

        final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
        if self._options.inline_custom:
            for line in file(customiseFile,'r'):
                if "import FWCore.ParameterSet.Config" in line:
                    continue
                final_snippet += line
        else:
            final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
        for fcn in custMap[f]:
            print "customising the process with",fcn,"from",f
            if not hasattr(package,fcn):
                raise Exception("config "+f+" has no function "+fcn)
            self.process=getattr(package,fcn)(self.process)
            final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
            final_snippet += "\nprocess = %s(process)\n"%(fcn,)

        final_snippet += '\n# End of customisation functions\n'

        final_snippet='\n# Customisation from command line\n'
        if self._options.customise_commands:
            for com in self._options.customise_commands.split('\\n'):
                com=string.lstrip(com)
                final_snippet +='\n'+com
        if len(self.stepMap):

        if self._options.particleTable not in defaultOptions.particleTableList:
            print 'Invalid particle table provided. Options are:'
            print defaultOptions.particleTable
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        self.EIDefaultCFF=None
        self.SKIMDefaultCFF="Configuration/StandardSequences/Skims_cff"
        self.POSTRECODefaultCFF="Configuration/StandardSequences/PostRecoGenerator_cff"
        self.VALIDATIONDefaultCFF="Configuration/StandardSequences/Validation_cff"
        self.L1HwValDefaultCFF = "Configuration/StandardSequences/L1HwVal_cff"
        self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOffline_cff"
        self.HARVESTINGDefaultCFF="Configuration/StandardSequences/Harvesting_cff"
        self.ALCAHARVESTDefaultCFF="Configuration/StandardSequences/AlCaHarvesting_cff"
        self.ENDJOBDefaultCFF="Configuration/StandardSequences/EndOfProcess_cff"
        self.ConditionsDefaultCFF = "Configuration/StandardSequences/FrontierConditions_GlobalTag_cff"
        self.CFWRITERDefaultCFF = "Configuration/StandardSequences/CrossingFrameWriter_cff"
        self.REPACKDefaultCFF="Configuration/StandardSequences/DigiToRaw_Repack_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.DATAMIXDefaultCFF="Configuration/StandardSequences/DataMixer"+self._options.datamix+"_cff"
            if self._options.datamix == 'PreMix':
                self.DIGIDefaultCFF="Configuration/StandardSequences/DigiDMPreMix_cff"
                self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if "DIGIPREMIX" in self.stepMap.keys():
            self.DIGIDefaultCFF="Configuration/StandardSequences/Digi_PreMix_cff"

        self.ALCADefaultSeq=None
        self.LHEDefaultSeq='externalLHEProducer'
        self.GENDefaultSeq='pgen'
        self.SIMDefaultSeq='psim'
        self.DIGIDefaultSeq='pdigi'
        self.DIGIPREMIXDefaultSeq='pdigi'
        self.DIGIPREMIX_S2DefaultSeq='pdigi'
        self.DATAMIXDefaultSeq=None
        self.DIGI2RAWDefaultSeq='DigiToRaw'
        self.HLTDefaultSeq='GRun'
        self.L1DefaultSeq=None
        self.L1REPACKDefaultSeq='GT'
        self.HARVESTINGDefaultSeq=None
        self.ALCAHARVESTDefaultSeq=None
        self.CFWRITERDefaultSeq=None
        self.RAW2DIGIDefaultSeq='RawToDigi'
        self.L1RecoDefaultSeq='L1Reco'
        self.L1TrackTriggerDefaultSeq='L1TrackTrigger'
        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            self.RECODefaultSeq='reconstruction'
        else:
            self.RECODefaultSeq='reconstruction_fromRECO'

        self.EIDefaultSeq='top'
        self.POSTRECODefaultSeq=None
        self.L1HwValDefaultSeq='L1HwVal'
        self.DQMDefaultSeq='DQMOffline'
        self.VALIDATIONDefaultSeq=''
        self.ENDJOBDefaultSeq='endOfProcess'
        self.REPACKDefaultSeq='DigiToRawRepack'
        self.PATDefaultSeq='miniAOD'

        self.EVTCONTDefaultCFF="Configuration/EventContent/EventContent_cff"

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        if 'reGEN' in self.stepMap:
            self.GENDefaultSeq='fixGenInfo'

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentCosmics_cff"
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationCosmics_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingCosmics_cff"
            self.RECODefaultSeq='reconstructionCosmics'
            self.DQMDefaultSeq='DQMOfflineCosmics'

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.HLTDefaultSeq = 'HIon'
            self.VALIDATIONDefaultCFF="Configuration/StandardSequences/ValidationHeavyIons_cff"
            self.VALIDATIONDefaultSeq=''
            self.EVTCONTDefaultCFF="Configuration/EventContent/EventContentHeavyIons_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.RECODefaultSeq='reconstructionHeavyIons'
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIons_cff"
            self.DQMDefaultSeq='DQMOfflineHeavyIons'
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            self.HARVESTINGDefaultCFF="Configuration/StandardSequences/HarvestingHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.RAW2RECODefaultSeq=','.join([self.RAW2DIGIDefaultSeq,self.RECODefaultSeq])

        self.USERDefaultSeq='user'
        self.USERDefaultCFF=None

        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print "magnetic field option forced to: AutoFromDBCurrent"
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        self.GeometryCFF='Configuration/StandardSequences/GeometryRecoDB_cff'
        self.geometryDBLabel=None

        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'

        def inGeometryKeys(opt):
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]

        geoms=self._options.geometry.split(',')
        if '/' in geoms[1] or '_cff' in geoms[1]:
            self.GeometryCFF=geoms[1]
        else:
            self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):
            self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
            self.geometryDBLabel=geoms[0][3:]
        else:
            if '/' in geoms[0] or '_cff' in geoms[0]:
                self.SimGeometryCFF=geoms[0]
            else:
                simGeometry=geoms[0]
                if self._options.gflash==True:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                else:
                    self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'
            self.RECOBEFMIXDefaultCFF = 'FastSimulation.Configuration.Reconstruction_BefMix_cff'
            self.RECOBEFMIXDefaultSeq = 'reconstruction_befmix'
            self.DQMOFFLINEDefaultCFF="FastSimulation.Configuration.DQMOfflineMC_cff"

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        if self._options.isData:
            self._options.pileup=None

        self.REDIGIDefaultSeq=self.DIGIDefaultSeq
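        # Illustrative sketch: the --geometry option handled above is either a single key
        # or a "SimGeometry,RecoGeometry" pair (the keys below are examples), e.g.
        #
        #   --geometry DB:Extended
        #       -> SimGeometryCFF = Configuration/StandardSequences/GeometrySimDB_cff
        #          geometryDBLabel = 'Extended'
        #   --geometry Extended2015,Extended2015Reco
        #       -> SimGeometryCFF = Configuration/Geometry/GeometryExtended2015_cff
        #          GeometryCFF    = Configuration/Geometry/GeometryExtended2015Reco_cff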
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
            else:
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess
        if ( len(sequence.split('.'))==1 ):
        elif ( len(sequence.split('.'))==2 ):
            sequence=sequence.split('.')[1]
        else:
            print "sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a"
            print sequence,"not recognized"

        for i,s in enumerate(seq.split('*')):
            if i==0:
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
            else:
                p=getattr(self.process,prefix)
                p+=getattr(self.process, s)
        self.schedule.append(getattr(self.process,prefix))

        if self.nextScheduleIsConditional:
            self.conditionalPaths.append(prefix)
        setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
        self.schedule.append(getattr(self.process,prefix))

        for i,s in enumerate(seq.split('+')):
            setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process,sn))
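        # Illustrative sketch: the scheduling helpers above wrap a named sequence into a
        # cms.Path (or cms.EndPath) and append it to self.schedule, which ends up as
        # process.schedule in the dumped configuration. For instance the SIM step, with
        # its default sequence 'psim', results in
        #
        #   process.simulation_step = cms.Path(process.psim)
        #   # ... and simulation_step is then listed in process.schedule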
        """ Enrich the process with alca streams """
        sequence = sequence.split('.')[-1]
        alcaList = sequence.split("+")
        from Configuration.AlCa.autoAlca import autoAlca

        for name in alcaConfig.__dict__:
            alcastream = getattr(alcaConfig,name)
            shortName = name.replace('ALCARECOStream','')
            if shortName in alcaList and isinstance(alcastream,cms.FilteredStream):
                output = self.addExtraStream(name,alcastream, workflow = workflow)
                self.executeAndRemember('process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+'_noDrop.outputCommands)')
                self.AlCaPaths.append(shortName)
                if 'DQM' in alcaList:
                    if not self._options.inlineEventContent and hasattr(self.process,name):
                        self.executeAndRemember('process.' + name + '.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
                    else:
                        output.outputCommands.append("keep *_MEtoEDMConverter_*_*")

                if self._options.hltProcess or 'HLT' in self.stepMap:
                    if isinstance(alcastream.paths,tuple):
                        for path in alcastream.paths:

                for i in range(alcaList.count(shortName)):
                    alcaList.remove(shortName)

            elif name == 'pathALCARECODQM' and 'DQM' in alcaList:
                path = getattr(alcaConfig,name)
                self.schedule.append(path)
                alcaList.remove('DQM')

            if isinstance(alcastream,cms.Path):
                self.blacklist_paths.append(alcastream)

        if len(alcaList) != 0:
            available=[]
            for name in alcaConfig.__dict__:
                alcastream = getattr(alcaConfig,name)
                if isinstance(alcastream,cms.FilteredStream):
                    available.append(name.replace('ALCARECOStream',''))
            print "The following alcas could not be found "+str(alcaList)
            print "available ",available
            raise Exception("The following alcas could not be found "+str(alcaList))
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print "Loading lhe fragment from",loadFragment
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
        """ load the fragment of generator configuration """
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','')
        if not '/' in loadFragment:
            loadFragment='Configuration.Generator.'+loadFragment
        else:
            loadFragment=loadFragment.replace('/','.')
        print "Loading generator fragment from",loadFragment
        __import__(loadFragment)

        if not (self._options.filein or self._options.dasquery):
            raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")

        generatorModule=sys.modules[loadFragment]
        genModules=generatorModule.__dict__

        if self.LHEDefaultSeq in genModules:
            del genModules[self.LHEDefaultSeq]

        if self._options.hideGen:
            self.loadAndRemember(loadFragment)
        else:
            self.process.load(loadFragment)

        import FWCore.ParameterSet.Modules as cmstypes
        for name in genModules:
            theObject = getattr(generatorModule,name)
            if isinstance(theObject, cmstypes._Module):
                self._options.inlineObjets=name+','+self._options.inlineObjets
            elif isinstance(theObject, cms.Sequence) or isinstance(theObject, cmstypes.ESProducer):
                self._options.inlineObjets+=','+name

        if sequence == self.GENDefaultSeq or sequence == 'pgen_genonly':
            if 'ProductionFilterSequence' in genModules and ('generator' in genModules):
                self.productionFilterSequence = 'ProductionFilterSequence'
            elif 'generator' in genModules:
                self.productionFilterSequence = 'generator'

        """ Enrich the schedule with the rest of the generation step """
        self.loadDefaultOrSpecifiedCFF(sequence,self.GENDefaultCFF)
        genSeqName=sequence.split('.')[-1]

        from Configuration.StandardSequences.VtxSmeared import VtxSmeared
        cffToBeLoaded=VtxSmeared[self._options.beamspot]
        self.loadAndRemember(cffToBeLoaded)
        raise Exception("VertexSmearing type or beamspot "+self._options.beamspot+" unknown.")

        if self._options.scenario == 'HeavyIons':
            if self._options.pileup=='HiMixGEN':
                self.loadAndRemember("Configuration/StandardSequences/GeneratorMix_cff")
            else:
                self.loadAndRemember("Configuration/StandardSequences/GeneratorHI_cff")

        self.process.generation_step = cms.Path( getattr(self.process,genSeqName) )
        self.schedule.append(self.process.generation_step)

        self.executeAndRemember('process.genstepfilter.triggerConditions=cms.vstring("generation_step")')

        if 'reGEN' in self.stepMap:

        """ Enrich the schedule with the summary of the filter step """
        self.loadAndRemember("GeneratorInterface/Core/genFilterSummary_cff")
        self.scheduleSequenceAtEnd('genFilterSummary','genfiltersummary_step')
        """ Enrich the schedule with the simulation step"""
        self.loadDefaultOrSpecifiedCFF(sequence,self.SIMDefaultCFF)
        if not self._options.fast:
            if self._options.gflash==True:
                self.loadAndRemember("Configuration/StandardSequences/GFlashSIM_cff")

            if self._options.magField=='0T':
                self.executeAndRemember("process.g4SimHits.UseMagneticField = cms.bool(False)")
        else:
            if self._options.magField=='0T':
                self.executeAndRemember("process.famosSimHits.UseMagneticField = cms.bool(False)")

        self.scheduleSequence(sequence.split('.')[-1],'simulation_step')
        """ Enrich the schedule with the digitisation step"""
        if self._options.gflash==True:
            self.loadAndRemember("Configuration/StandardSequences/GFlashDIGI_cff")

        if sequence == 'pdigi_valid':
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersValid)")

        if sequence != 'pdigi_nogen' and sequence != 'pdigi_valid_nogen' and not self.process.source.type_()=='EmptySource':
            if self._options.inputEventContent=='':
                self._options.inputEventContent='REGEN'
            else:
                self._options.inputEventContent=self._options.inputEventContent+',REGEN'

        """ Enrich the schedule with the digitisation step"""
        if sequence == 'pdigi_valid':
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoiseValid)")
        else:
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersNoNoise)")

        """ Enrich the schedule with the digitisation step"""
        self.loadDefaultOrSpecifiedCFF(sequence,self.DIGIDefaultCFF)
        self.loadAndRemember("SimGeneral/MixingModule/digi_MixPreMix_cfi")

        if sequence == 'pdigi_valid':
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMixValid)")
        else:
            self.executeAndRemember("process.mix.digitizers = cms.PSet(process.theDigitizersMixPreMix)")

        self.scheduleSequence(sequence.split('.')[-1],'digitisation_step')
        """ Enrich the schedule with the crossing frame writer step"""

        """ Enrich the schedule with the digitisation step"""
        if self._options.pileup_input:
            if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                theFiles=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
            elif self._options.pileup_input.startswith("filelist:"):
                theFiles= (filesFromList(self._options.pileup_input[9:]))[0]
            else:
                theFiles=self._options.pileup_input.split(',')
            self.executeAndRemember("process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )

        if "DIGIPREMIX" in self.stepMap.keys():
            self.executeAndRemember("process.SiStripDigiToRaw.FedReadoutMode = cms.string('PREMIX_RAW')")
        """ Enrich the schedule with the L1 simulation step"""
        assert(sequence == None)

        """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
        supported = ['GT','GT1','GT2','GCTGT','Full','FullSimTP','FullMC','Full2015Data','uGT']
        if sequence in supported:
            self.loadAndRemember('Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
            if self._options.scenario == 'HeavyIons':
                self.renameInputTagsInSequence("SimL1Emulator","rawDataCollector","rawDataRepacker")
            self.scheduleSequence('SimL1Emulator','L1RePack_step')
        else:
            print "L1REPACK with '",sequence,"' is not supported! Supported choices are: ",supported
        """ Enrich the schedule with the HLT simulation step"""
        print "no specification of the hlt menu has been given, should never happen"
        raise Exception('no HLT sequence provided')

        from Configuration.HLT.autoHLT import autoHLT
        sequence = autoHLT[key]
        raise ValueError('no HLT mapping key "%s" found in autoHLT' % key)

        if self._options.scenario == 'HeavyIons':
            optionsForHLT['type'] = 'HIon'
        else:
            optionsForHLT['type'] = 'GRun'
        optionsForHLTConfig = ', '.join('%s=%s' % (key, repr(val)) for (key, val) in optionsForHLT.iteritems())
        if sequence == 'run,fromSource':
            if hasattr(self.process.source,'firstRun'):
                self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
            elif hasattr(self.process.source,'setRunNumber'):
                self.executeAndRemember('process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
            else:
                raise Exception('Cannot replace menu to load %s'%(sequence))
        else:
            self.executeAndRemember('process.loadHltConfiguration("%s",%s)'%(sequence.replace(',',':'),optionsForHLTConfig))

        if self._options.isMC:
            self._options.customisation_file.append("HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")

        if self._options.name != 'HLT':
            self.additionalCommands.append('from HLTrigger.Configuration.CustomConfigs import ProcessName')
            self.additionalCommands.append('process = ProcessName(process)')
            self.additionalCommands.append('')
            from HLTrigger.Configuration.CustomConfigs import ProcessName

        self.schedule.append(self.process.HLTSchedule)
        [self.blacklist_paths.append(path) for path in self.process.HLTSchedule if isinstance(path,(cms.Path,cms.EndPath))]

        if self._options.fast:
            if not hasattr(self.process,'HLTEndSequence'):
                self.executeAndRemember("process.HLTEndSequence = cms.Sequence( process.dummyModule )")
        seqReco=sequence.split(',')[1]
        seqDigi=sequence.split(',')[0]
        print "RAW2RECO requires two specifications",sequence,"insufficient"

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
        ''' Enrich the schedule with L1 HW validation '''
        print '\n\n\n DEPRECATED this has no action \n\n\n'

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with a user defined filter sequence '''
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]

        label=visitee.label()

        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq

        self.scheduleSequence(filterSeq,'filtering_step')
        self.nextScheduleIsConditional=True

        self.productionFilterSequence = filterSeq
        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print "ERROR: this step is only implemented for FastSim"
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

        ''' Enrich the schedule with PAT '''
        self.prepare_PATFILTER(self)
        self.loadDefaultOrSpecifiedCFF(sequence,self.PATDefaultCFF,1)
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
        if self._options.fast:
            self._options.customisation_file_unsch.insert(1,"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff.miniAOD_customizeMETFiltersFastSim")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\""
1680 if sequence
in EventInterpretation:
1681 self.EIDefaultCFF = EventInterpretation[sequence]
1682 sequence =
'EIsequence' 1684 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1685 self.loadDefaultOrSpecifiedCFF(sequence,self.EIDefaultCFF)
1686 self.scheduleSequence(sequence.split(
'.')[-1],
'eventinterpretaion_step')
        ''' Enrich the schedule with skimming fragments'''
        sequence = sequence.split('.')[-1]

        skimlist=sequence.split('+')
        from Configuration.Skimming.autoSkim import autoSkim

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
            elif (shortname in skimlist):
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                        )
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print 'WARNING, possible typo with SKIM:'+'+'.join(skimlist)
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
    def prepare_USER(self, sequence=None):
        ''' Enrich the schedule with a user defined sequence '''
        # ...

    def prepare_POSTRECO(self, sequence=None):
        """ Enrich the schedule with the postreco step """
        # ...

    def prepare_VALIDATION(self, sequence='validation'):
        print sequence,"in preparing validation"
        from Validation.Configuration.autoValidation import autoValidation
        # ...
        sequence=sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName=sequence.split(',')[0].split('+')
            valSeqName=sequence.split(',')[1].split('+')
            # ...
        else:
            # ...
            prevalSeqName=sequence.split('+')
            valSeqName=sequence.split('+')
            # ...
            postfix='_'+sequence
            prevalSeqName=['prevalidation'+postfix]
            valSeqName=['validation'+postfix]
            if not hasattr(self.process,valSeqName[0]):
                # ...
                valSeqName=[sequence]
        # ...
        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                pass  # ...
        # ...
        for (i,s) in enumerate(prevalSeqName):
            # ...
            setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
            self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))
        # ...
        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))
        # ...
        if 'PAT' in self.stepMap and not 'RECO' in self.stepMap:
            pass  # ...
        # ...
        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds=True
        if not 'DIGI' in self.stepMap and not self._options.fast:
            # ...
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
        # ...
        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # ...
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.process,'validation_step%s'%NFI(i))._seq
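The step names generated above depend on the NFI helper, which is not shown in this listing; it appears to map index 0 to an empty suffix and later indices to their number. A small sketch under that assumption, with made-up sequence names:

def NFI(index):
    # assumed behaviour of the real helper: no suffix for the first entry
    return '' if index == 0 else '%s' % index

valSeqName = ['validation', 'validationMiniAOD']   # illustrative
for i, s in enumerate(valSeqName):
    print('validation_step%s -> cms.EndPath(process.%s)' % (NFI(i), s))
# validation_step  -> cms.EndPath(process.validation)
# validation_step1 -> cms.EndPath(process.validationMiniAOD)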
1818 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1819 It will climb down within PSets, VPSets and VInputTags to find its target""" 1820 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1827 if isinstance(pset, cms._Parameterizable):
1828 for name
in pset.parameters_().
keys():
1834 value = getattr(pset,name)
1835 type = value.pythonTypeName()
1836 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1837 self.
doIt(value,base+
"."+name)
1838 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1839 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1840 elif type
in (
'cms.string',
'cms.untracked.string'):
1842 if self.
_verbose:
print "set string process name %s.%s %s ==> %s"% (base, name, value, self.
_paramReplace)
1844 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1845 for (i,n)
in enumerate(value):
1846 if not isinstance(n, cms.InputTag):
1850 if self.
_verbose:
print "set process name %s.%s[%d] %s ==> %s " % (base, name, i, n, self.
_paramReplace)
1853 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1854 for (i,n)
in enumerate(value):
1857 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1859 if self.
_verbose:
print "set process name %s.%s %s ==> %s " % (base, name, value, self.
_paramReplace)
1860 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1865 label = visitee.label()
1866 except AttributeError:
1867 label =
'<Module not in a Process>' 1869 label =
'other execption' 1870 self.
doIt(visitee, label)
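The visitor's doIt() is essentially a recursive descent over parameter sets that rewrites every InputTag whose process name matches the search string. A self-contained analogue on plain dicts and lists, purely to illustrate the recursion; none of these names exist in ConfigBuilder:

def replace_process_name(params, old, new, base='process'):
    for name, value in params.items():
        if isinstance(value, dict):                  # analogue of a cms.PSet
            replace_process_name(value, old, new, base + '.' + name)
        elif isinstance(value, list):                # analogue of a cms.VPSet
            for i, item in enumerate(value):
                if isinstance(item, dict):
                    replace_process_name(item, old, new, '%s.%s[%d]' % (base, name, i))
        elif name == 'processName' and value == old: # analogue of cms.InputTag.processName
            params[name] = new

cfg = {'src': {'processName': 'HLT', 'label': 'hltTriggerSummaryAOD'}}
replace_process_name(cfg, 'HLT', 'reHLT')
print(cfg['src']['processName'])   # reHLT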
1877 print "Replacing all InputTag %s => %s"%(oldT,newT)
1880 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1881 if not loadMe
in self.additionalCommands:
1882 self.additionalCommands.append(loadMe)
1883 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1887 if self._options.hltProcess:
1888 proc=self._options.hltProcess
1890 proc=self.process.name_()
1891 if proc==HLTprocess:
return 1893 print "replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc)
1895 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
1896 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1897 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
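Rather than editing the loaded configuration directly, the method above records a command string that is replayed in the generated config. For illustrative values the recorded command comes out as:

sequence, HLTprocess, proc = 'validation', 'HLT', 'reHLT'   # illustrative values
cmd = ('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor('
       '"%s", "%s", whitelist = ("subSystemFolder",)))' % (sequence, HLTprocess, proc))
print(cmd)
# process.validation.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("HLT", "reHLT", whitelist = ("subSystemFolder",)))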
    def expandMapping(self, seqList, mapping, index=None):
        # ... (initialisation of level and maxLevel elided in this listing)
        while '@' in repr(seqList) and level<maxLevel:
            level+=1
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location=specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo=mapping[location]
                    if index is not None:
                        mappedTo=mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break
        if level==maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
    def prepare_DQM(self, sequence='DQMOffline'):
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        # ...
        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print "Duplicate entries for DQM:, using",sequenceList

        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)
            # ...
            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                pass  # ...
            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
            # ...
            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)
            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
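For a '+'-separated DQM spec this produces one EndPath per sequence, the first one without an index. A quick sketch of the naming with illustrative sequence names:

sequenceList = ['DQMOffline', 'DQMOfflineMuon']   # illustrative
for i, seq in enumerate(sequenceList):
    pathName = 'dqmoffline_step' if i == 0 else 'dqmoffline_%d_step' % i
    print('%s = cms.EndPath(process.%s)' % (pathName, seq))
# dqmoffline_step   = cms.EndPath(process.DQMOffline)
# dqmoffline_1_step = cms.EndPath(process.DQMOfflineMuon)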
1957 """ Enrich the process with harvesting step """ 1958 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1962 sequence = sequence.split(
'.')[-1]
1965 harvestingList = sequence.split(
"+")
1966 from DQMOffline.Configuration.autoDQM
import autoDQM
1967 from Validation.Configuration.autoValidation
import autoValidation
1969 combined_mapping = copy.deepcopy( autoDQM )
1970 combined_mapping.update( autoValidation )
1971 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1973 if len(set(harvestingList))!=len(harvestingList):
1974 harvestingList=
list(set(harvestingList))
1975 print "Duplicate entries for HARVESTING, using",harvestingList
1977 for name
in harvestingList:
1978 if not name
in harvestingConfig.__dict__:
1979 print name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys()
1981 harvestingstream = getattr(harvestingConfig,name)
1982 if isinstance(harvestingstream,cms.Path):
1983 self.schedule.append(harvestingstream)
1984 self.blacklist_paths.append(harvestingstream)
1985 if isinstance(harvestingstream,cms.Sequence):
1986 setattr(self.process,name+
"_step",cms.Path(harvestingstream))
1987 self.schedule.append(getattr(self.process,name+
"_step"))
1993 """ Enrich the process with AlCaHarvesting step """ 1995 sequence=sequence.split(
".")[-1]
1998 harvestingList = sequence.split(
"+")
2002 from Configuration.AlCa.autoPCL
import autoPCL
2005 for name
in harvestingConfig.__dict__:
2006 harvestingstream = getattr(harvestingConfig,name)
2007 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2008 self.schedule.append(harvestingstream)
2009 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2010 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2011 harvestingList.remove(name)
2013 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2014 self.schedule.append(lastStep)
2016 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2017 print "The following harvesting could not be found : ", harvestingList
2018 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
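For each matched PCL workflow the two executeAndRemember calls record commands that follow a fixed naming convention; with an illustrative workflow name they read:

name = 'BeamSpotByRun'   # illustrative AlCa/PCL workflow name
print("process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name + "_dbOutput)")
print("process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name + "_metadata)")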
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)

    def build_production_info(self, evt_type, evtnumber):
        """ Add useful info for the production. """
        self.process.configurationMetadata=cms.untracked.PSet\
                                           (version=cms.untracked.string("$Revision: 1.19 $"),
                                            name=cms.untracked.string("Applications"),
                                            annotation=cms.untracked.string(evt_type+" nevts:"+str(evtnumber))
                                            )
        self.addedObjects.append(("Production Info","configurationMetadata"))
2044 """ Prepare the configuration string and add missing pieces.""" 2056 outputModuleCfgCode=
"" 2057 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.with_output:
2062 self.pythonCfgCode =
"# Auto generated configuration file\n" 2063 self.pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2064 self.pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2065 self.pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2066 if hasattr(self.
_options,
"era")
and self._options.era :
2067 self.pythonCfgCode +=
"from Configuration.StandardSequences.Eras import eras\n\n" 2068 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"'" 2070 for requestedEra
in self._options.era.split(
",") :
2071 self.pythonCfgCode +=
",eras."+requestedEra
2072 self.pythonCfgCode +=
")\n\n" 2074 self.pythonCfgCode +=
"process = cms.Process('"+self.process.name_()+
"')\n\n" 2076 self.pythonCfgCode +=
"# import of standard configurations\n" 2077 for module
in self.imports:
2078 self.pythonCfgCode += (
"process.load('"+module+
"')\n")
2081 if not hasattr(self.process,
"configurationMetadata"):
2085 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2087 self.pythonCfgCode +=
"\n" 2088 for comment,object
in self.addedObjects:
2090 self.pythonCfgCode +=
"\n# "+comment+
"\n" 2091 self.pythonCfgCode +=
dumpPython(self.process,object)
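Put together, the header built above comes out along these lines in the generated configuration; the command line options, era and loaded module are illustrative:

# Auto generated configuration file
# using:
# Revision: 1.19
# with command line options: step3 --conditions auto:run2_mc --era Run2_2016 ...
import FWCore.ParameterSet.Config as cms

from Configuration.StandardSequences.Eras import eras

process = cms.Process('RECO',eras.Run2_2016)

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')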
        self.pythonCfgCode += "\n# Output definition\n"
        self.pythonCfgCode += outputModuleCfgCode

        self.pythonCfgCode += "\n# Additional output definition\n"
        # ...
        nl=self.additionalOutputs.keys()
        # ...
        for name in nl:
            output = self.additionalOutputs[name]
            self.pythonCfgCode += "process.%s = %s" %(name, output.dumpPython())
            tmpOut = cms.EndPath(output)
            setattr(self.process,name+'OutPath',tmpOut)
            self.schedule.append(tmpOut)

        self.pythonCfgCode += "\n# Other statements\n"
        for command in self.additionalCommands:
            self.pythonCfgCode += command + "\n"

        for object in self._options.inlineObjets.split(','):
            # ...
            if not hasattr(self.process,object):
                print 'cannot inline -'+object+'- : not known'
            else:
                self.pythonCfgCode += '\n'
                self.pythonCfgCode += dumpPython(self.process,object)

        self.pythonCfgCode += "\n# Path and EndPath definitions\n"
        for path in self.process.paths:
            if getattr(self.process,path) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process,path)
        for endpath in self.process.endpaths:
            if getattr(self.process,endpath) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process,endpath)
        self.pythonCfgCode += "\n# Schedule definition\n"
        result = "process.schedule = cms.Schedule("
        # ...
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process,"HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
            pathNames = ['process.'+p.label_() for p in beforeHLT]
            result += ','.join(pathNames)+')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.'+p.label_() for p in afterHLT]
            result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
        else:
            pathNames = ['process.'+p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'

        self.pythonCfgCode += result
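Depending on whether an HLTSchedule is present, the emitted schedule definition takes one of two shapes; the path names are illustrative:

# without an HLT schedule:
process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step,process.endjob_step,process.RECOSIMoutput_step)

# with an HLT schedule spliced in:
process.schedule = cms.Schedule(process.raw2digi_step)
process.schedule.extend(process.HLTSchedule)
process.schedule.extend([process.reconstruction_step,process.endjob_step])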
        if self._options.nThreads != "1":
            self.pythonCfgCode += "\n"
            self.pythonCfgCode += "#Setup FWK for multithreaded\n"
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(0)\n"

        if self._options.isRepacked:
            self.pythonCfgCode += "\n"
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            self.pythonCfgCode += "MassReplaceInputTag(process)\n"

        if self.productionFilterSequence:
            self.pythonCfgCode += '# filter all path with the production filter sequence\n'
            self.pythonCfgCode += 'for path in process.paths:\n'
            if len(self.conditionalPaths):
                self.pythonCfgCode += '\tif not path in %s: continue\n'%str(self.conditionalPaths)
            if len(self.excludedPaths):
                self.pythonCfgCode += '\tif path in %s: continue\n'%str(self.excludedPaths)
            self.pythonCfgCode += '\tgetattr(process,path)._seq = process.%s * getattr(process,path)._seq \n'%(self.productionFilterSequence,)
            pfs = getattr(self.process,self.productionFilterSequence)
            for path in self.process.paths:
                if not path in self.conditionalPaths: continue
                if path in self.excludedPaths: continue
                getattr(self.process,path)._seq = pfs * getattr(self.process,path)._seq
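When a production filter sequence is configured, the generated config also receives a small loop that prepends the filter to every eligible path; with illustrative path sets and a conventional sequence name it reads:

# filter all path with the production filter sequence
for path in process.paths:
	if not path in ['generation_step', 'simulation_step']: continue
	getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq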
        if self._options.runUnscheduled:
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            # ...
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            # ...
            for module in self.importsUnsch:
                self.process.load(module)
                self.pythonCfgCode += ("process.load('"+module+"')\n")
            # ...
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import cleanUnscheduled\n"
            self.pythonCfgCode += "process=cleanUnscheduled(process)\n"
            # ...
            from FWCore.ParameterSet.Utilities import cleanUnscheduled
            # ...

        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        self.pythonCfgCode += "# End adding early deletion\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        # ...
        if self._options.io:
            # ...
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io=open(self._options.io,'w')
            ioJson={}
            if hasattr(self.process.source,"fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary']=self.process.source.fileNames.value()
            if hasattr(self.process.source,"secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary']=self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup']=self._options.pileup_input[4:]
            for (o,om) in self.process.outputModules_().items():
                ioJson[o]=om.fileName.value()
            ioJson['GT']=self.process.GlobalTag.globaltag.value()
            if self.productionFilterSequence:
                ioJson['filter']=self.productionFilterSequence
            # ...
            io.write(json.dumps(ioJson))
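The .io file is a small JSON summary of the job's inputs and outputs; a runnable sketch of what ends up in it, with all values illustrative:

import json

ioJson = {
    'primary': ['file:step2.root'],            # process.source.fileNames
    'RECOSIMoutput': 'step3.root',             # one entry per output module
    'GT': '80X_mcRun2_asymptotic_v4',          # resolved GlobalTag
    'filter': 'ProductionFilterSequence',      # only if a production filter is in use
}
print(json.dumps(ioJson))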