3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
13 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
18 from subprocess
import Popen,PIPE
19 import FWCore.ParameterSet.DictTypes
as DictTypes
# Default values for every cmsDriver/ConfigBuilder option.
# NOTE(review): `defaultOptions` is created above this chunk (presumably an
# Options container imported from Configuration.Applications) — confirm there.
# Each attribute below is the fallback used when the corresponding command-line
# option is not given.

# --- data/MC mixing and sample-type flags ---
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True

# --- processing steps and pileup configuration ---
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# --- geometry and magnetic field ---
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
# Allowed values for the --scenario option.
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']

# --- harvesting / simulation details ---
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False

# --- event counts; -1 means "all events" ---
defaultOptions.number = -1
defaultOptions.number_out = None

# --- job identification and input specification ---
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""

# --- customisation hooks (files / inline commands) ---
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False

# --- particle data table ---
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']

# --- input/output directories, file type and names ---
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''

# --- misc. I/O and conditions tweaks ---
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''

# --- event content / data tier for the output modules ---
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets =''
defaultOptions.hideGen=False

# Import placed here (mid-assignments) in the original file; keys used below
# as the default beam-spot scenarios.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None

# --- output definition and input/output commands ---
defaultOptions.outputDefinition =''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False

# --- relval / profiling / repacked-data handling ---
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False

# --- random-seed restoration and input pruning ---
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''

# --- generated python config, IO file, lumi mask ---
defaultOptions.python_filename =''
defaultOptions.io=None
defaultOptions.lumiToProcess=None

# --- fast simulation and MC run-number assignment ---
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None

# --- scheduling / framework execution options ---
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# NOTE(review): kept as a string (not int) in the original — downstream code
# presumably converts it; confirm before changing the type.
defaultOptions.nThreads = '1'
92 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
93 return "process."+name+
" = " + theObject.dumpPython(
"process")
94 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
95 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 97 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 100 import FWCore.ParameterSet.Config
as cms
103 for line
in open(fileName,
'r'): 104 if line.count(
".root")>=2:
106 entries=line.replace(
"\n",
"").
split()
107 if not entries[0]
in prim:
108 prim.append(entries[0])
109 if not entries[1]
in sec:
110 sec.append(entries[1])
111 elif (line.find(
".root")!=-1):
112 entry=line.replace(
"\n",
"")
113 if not entry
in prim:
116 if not hasattr(s,
"fileNames"):
117 s.fileNames=cms.untracked.vstring(prim)
119 s.fileNames.extend(prim)
121 if not hasattr(s,
"secondaryFileNames"):
122 s.secondaryFileNames=cms.untracked.vstring(sec)
124 s.secondaryFileNames.extend(sec)
125 print(
"found files: ",prim)
127 raise Exception(
"There are not files in input from the file list")
129 print(
"found parent files:",sec)
134 import FWCore.ParameterSet.Config
as cms
137 print(
"the query is",query)
140 while eC!=0
and count<3:
142 print(
'Sleeping, then retrying DAS')
144 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True)
146 tupleP = os.waitpid(p.pid, 0)
150 print(
"DAS succeeded after",count,
"attempts",eC)
152 print(
"DAS failed 3 times- I give up")
153 for line
in pipe.split(
'\n'):
154 if line.count(
".root")>=2:
156 entries=line.replace(
"\n",
"").
split()
157 if not entries[0]
in prim:
158 prim.append(entries[0])
159 if not entries[1]
in sec:
160 sec.append(entries[1])
161 elif (line.find(
".root")!=-1):
162 entry=line.replace(
"\n",
"")
163 if not entry
in prim:
166 if not hasattr(s,
"fileNames"):
167 s.fileNames=cms.untracked.vstring(prim)
169 s.fileNames.extend(prim)
171 if not hasattr(s,
"secondaryFileNames"):
172 s.secondaryFileNames=cms.untracked.vstring(sec)
174 s.secondaryFileNames.extend(sec)
175 print(
"found files: ",prim)
177 print(
"found parent files:",sec)
180 def anyOf(listOfKeys,dict,opt=None):
189 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
192 """The main building routines """ 194 def __init__(self, options, process = None, with_output = False, with_input = False ):
195 """options taken from old cmsDriver and optparse """ 197 options.outfile_name = options.dirout+options.fileout
201 if self._options.isData
and options.isMC:
202 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
207 if 'ENDJOB' in self._options.step:
208 if (hasattr(self.
_options,
"outputDefinition")
and \
209 self._options.outputDefinition !=
'' and \
210 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
211 (hasattr(self.
_options,
"datatier")
and \
212 self._options.datatier
and \
213 'DQMIO' in self._options.datatier):
214 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
215 self._options.step=self._options.step.replace(
',ENDJOB',
'')
220 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
223 for step
in self._options.step.split(
","):
224 if step==
'':
continue 225 stepParts = step.split(
":")
226 stepName = stepParts[0]
227 if stepName
not in stepList
and not stepName.startswith(
're'):
228 raise ValueError(
"Step "+stepName+
" unknown")
229 if len(stepParts)==1:
231 elif len(stepParts)==2:
233 elif len(stepParts)==3:
234 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
236 raise ValueError(
"Step definition "+step+
" invalid")
237 self.stepKeys.append(stepName)
244 if hasattr(self.
_options,
"no_output_flag")
and self._options.no_output_flag:
271 Function to add the igprof profile service so that you can dump in the middle 274 profileOpts = self._options.profile.split(
':')
276 profilerInterval = 100
277 profilerFormat =
None 278 profilerJobFormat =
None 284 startEvent = profileOpts.pop(0)
285 if not startEvent.isdigit():
286 raise Exception(
"%s is not a number" % startEvent)
287 profilerStart =
int(startEvent)
289 eventInterval = profileOpts.pop(0)
290 if not eventInterval.isdigit():
291 raise Exception(
"%s is not a number" % eventInterval)
292 profilerInterval =
int(eventInterval)
294 profilerFormat = profileOpts.pop(0)
297 if not profilerFormat:
298 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
300 self._options.pileup,
301 self._options.conditions,
302 self._options.datatier,
303 self._options.profileTypeLabel)
304 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
305 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
306 elif not profilerJobFormat:
307 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 309 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
312 includeFile = includeFile.replace(
'/',
'.')
313 self.process.load(includeFile)
314 return sys.modules[includeFile]
317 """helper routine to load am memorize imports""" 320 includeFile = includeFile.replace(
'/',
'.')
321 self.imports.append(includeFile)
322 self.process.load(includeFile)
323 return sys.modules[includeFile]
326 """helper routine to remember replace statements""" 327 self.additionalCommands.append(command)
328 if not command.strip().startswith(
"#"):
331 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
335 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
336 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
338 self.process.options = cms.untracked.PSet( )
340 self.addedObjects.append((
"",
"options"))
342 if self._options.lazy_download:
343 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
344 stats = cms.untracked.bool(
True),
345 enable = cms.untracked.bool(
True),
346 cacheHint = cms.untracked.string(
"lazy-download"),
347 readHint = cms.untracked.string(
"read-ahead-buffered")
349 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
354 if self._options.profile:
356 self.process.IgProfService = cms.Service(
"IgProfService",
357 reportFirstEvent = cms.untracked.int32(start),
358 reportEventInterval = cms.untracked.int32(interval),
359 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
360 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
361 self.addedObjects.append((
"Setup IGProf Service for profiling",
"IgProfService"))
364 """Here we decide how many evts will be processed""" 365 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
366 if self._options.number_out:
367 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
368 self.addedObjects.append((
"",
"maxEvents"))
371 """Here the source is built. Priority: file, generator""" 372 self.addedObjects.append((
"Input source",
"source"))
374 def filesFromOption(self):
375 for entry
in self._options.filein.split(
','):
377 if entry.startswith(
"filelist:"):
379 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
380 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
382 self.process.source.fileNames.append(self._options.dirin+entry)
383 if self._options.secondfilein:
384 if not hasattr(self.process.source,
"secondaryFileNames"):
385 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
386 for entry
in self._options.secondfilein.split(
','):
388 if entry.startswith(
"filelist:"):
389 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
390 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
391 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
393 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
395 if self._options.filein
or self._options.dasquery:
396 if self._options.filetype ==
"EDM":
397 self.process.source=cms.Source(
"PoolSource",
398 fileNames = cms.untracked.vstring(),
399 secondaryFileNames= cms.untracked.vstring())
400 filesFromOption(self)
401 elif self._options.filetype ==
"DAT":
402 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
403 filesFromOption(self)
404 elif self._options.filetype ==
"LHE":
405 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
406 if self._options.filein.startswith(
"lhe:"):
408 args=self._options.filein.split(
':')
410 print(
'LHE input from article ',article)
411 location=
'/store/lhe/' 413 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
414 for line
in textOfFiles:
415 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
416 self.process.source.fileNames.append(location+article+
'/'+fileName)
419 print(
'Issue to load LHE files, please check and try again.')
422 if len(self.process.source.fileNames)==0:
423 print(
'Issue with empty filename, but can pass line check')
426 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
428 filesFromOption(self)
430 elif self._options.filetype ==
"DQM":
431 self.process.source=cms.Source(
"DQMRootSource",
432 fileNames = cms.untracked.vstring())
433 filesFromOption(self)
435 elif self._options.filetype ==
"DQMDAQ":
437 self.process.source=cms.Source(
"DQMStreamerReader")
440 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
441 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
443 if self._options.dasquery!=
'':
444 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
445 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
447 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
448 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
451 if 'GEN' in self.stepMap.keys():
452 if self._options.inputCommands:
453 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 455 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 457 if self.process.source
and self._options.inputCommands:
458 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
459 for command
in self._options.inputCommands.split(
','):
461 command = command.strip()
462 if command==
'':
continue 463 self.process.source.inputCommands.append(command)
464 if not self._options.dropDescendant:
465 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
467 if self._options.lumiToProcess:
468 import FWCore.PythonUtilities.LumiList
as LumiList
469 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
471 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.
stepMap or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
472 if self.process.source
is None:
473 self.process.source=cms.Source(
"EmptySource")
477 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
478 if not self._options.isMC :
479 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
480 if self._options.runsAndWeightsForMC:
483 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
484 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
485 __import__(RunsAndWeights[self._options.runsScenarioForMC])
486 self.
runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
488 self.
runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
491 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
493 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
494 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.
runsAndWeights))
499 """ Add output module to the process """ 501 if self._options.outputDefinition:
502 if self._options.datatier:
503 print(
"--datatier & --eventcontent options ignored")
506 outList = eval(self._options.outputDefinition)
507 for (id,outDefDict)
in enumerate(outList):
508 outDefDictStr=outDefDict.__str__()
509 if not isinstance(outDefDict,dict):
510 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
512 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
515 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
516 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
517 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
518 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
519 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
521 if not theModuleLabel:
522 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
524 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 526 for name
in tryNames:
527 if not hasattr(self.
process,name):
530 if not theModuleLabel:
531 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
533 defaultFileName=self._options.outfile_name
535 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
537 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
538 if not theFileName.endswith(
'.root'):
542 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
543 if theStreamType==
'DQMIO': theStreamType=
'DQM' 544 if theStreamType==
'ALL':
545 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
547 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
551 if theStreamType==
'ALCARECO' and not theFilterName:
552 theFilterName=
'StreamALCACombined' 555 CppType=
'PoolOutputModule' 556 if self._options.timeoutOutput:
557 CppType=
'TimeoutPoolOutputModule' 558 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 559 output = cms.OutputModule(CppType,
560 theEventContent.clone(),
561 fileName = cms.untracked.string(theFileName),
562 dataset = cms.untracked.PSet(
563 dataTier = cms.untracked.string(theTier),
564 filterName = cms.untracked.string(theFilterName))
566 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
567 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
568 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
569 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
571 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
574 if not hasattr(output,
'SelectEvents'):
575 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
577 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
580 if hasattr(self.
process,theModuleLabel):
581 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
583 setattr(self.
process,theModuleLabel,output)
584 outputModule=getattr(self.
process,theModuleLabel)
585 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
586 path=getattr(self.
process,theModuleLabel+
'_step')
587 self.schedule.append(path)
589 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
590 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 592 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
593 if theExtraOutputCommands:
594 if not isinstance(theExtraOutputCommands,list):
595 raise Exception(
"extra ouput command in --option must be a list of strings")
596 if hasattr(self.
process,theStreamType+
"EventContent"):
597 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
599 outputModule.outputCommands.extend(theExtraOutputCommands)
601 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
606 streamTypes=self._options.eventcontent.split(
',')
607 tiers=self._options.datatier.split(
',')
608 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
609 raise Exception(
"number of event content arguments does not match number of datatier arguments")
612 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
615 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
616 if streamType==
'':
continue 617 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue 618 if streamType==
'DQMIO': streamType=
'DQM' 619 eventContent=streamType
621 if streamType ==
"NANOEDMAOD" :
622 eventContent =
"NANOAOD" 623 elif streamType ==
"NANOEDMAODSIM" :
624 eventContent =
"NANOAODSIM" 625 theEventContent = getattr(self.
process, eventContent+
"EventContent")
627 theFileName=self._options.outfile_name
628 theFilterName=self._options.filtername
630 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
631 theFilterName=self._options.filtername
632 CppType=
'PoolOutputModule' 633 if self._options.timeoutOutput:
634 CppType=
'TimeoutPoolOutputModule' 635 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 636 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 637 output = cms.OutputModule(CppType,
639 fileName = cms.untracked.string(theFileName),
640 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
641 filterName = cms.untracked.string(theFilterName)
644 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
645 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
646 if hasattr(self.
process,
"filtering_step"):
647 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
649 if streamType==
'ALCARECO':
650 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
652 if "MINIAOD" in streamType:
653 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
656 outputModuleName=streamType+
'output' 657 setattr(self.
process,outputModuleName,output)
658 outputModule=getattr(self.
process,outputModuleName)
659 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
660 path=getattr(self.
process,outputModuleName+
'_step')
661 self.schedule.append(path)
663 if self._options.outputCommands
and streamType!=
'DQM':
664 for evct
in self._options.outputCommands.split(
','):
665 if not evct:
continue 666 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
668 if not self._options.inlineEventContent:
669 tmpstreamType=streamType
670 if "NANOEDM" in tmpstreamType :
671 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
672 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
674 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
676 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
682 Add selected standard sequences to the process 685 if self._options.pileup:
686 pileupSpec=self._options.pileup.split(
',')[0]
689 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
690 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
691 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
695 if '.' in pileupSpec:
696 mixingDict={
'file':pileupSpec}
697 elif pileupSpec.startswith(
'file:'):
698 mixingDict={
'file':pileupSpec[5:]}
701 mixingDict=copy.copy(Mixing[pileupSpec])
702 if len(self._options.pileup.split(
','))>1:
703 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(
',')+1:]))
706 if 'file:' in pileupSpec:
708 self.process.load(mixingDict[
'file'])
709 print(
"inlining mixing module configuration")
710 self._options.inlineObjets+=
',mix' 714 mixingDict.pop(
'file')
715 if not "DATAMIX" in self.stepMap.keys():
716 if self._options.pileup_input:
717 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
718 mixingDict[
'F']=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
719 elif self._options.pileup_input.startswith(
"filelist:"):
720 mixingDict[
'F']=(
filesFromList(self._options.pileup_input[9:]))[0]
722 mixingDict[
'F']=self._options.pileup_input.split(
',')
724 for command
in specialization:
726 if len(mixingDict)!=0:
727 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
734 if (
'SIM' in self.
stepMap or 'reSIM' in self.
stepMap)
and not self._options.fast:
739 print(
"Geometry option",self._options.geometry,
"unknown.")
746 stepSpec = self.
stepMap[stepName]
747 print(
"Step:", stepName,
"Spec:",stepSpec)
748 if stepName.startswith(
're'):
750 if stepName[2:]
not in self._options.donotDropOnInput:
751 self._options.inputEventContent=
'%s,%s'%(stepName.upper(),self._options.inputEventContent)
752 stepName=stepName[2:]
754 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
755 elif isinstance(stepSpec, list):
756 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
757 elif isinstance(stepSpec, tuple):
758 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
760 raise ValueError(
"Invalid step definition")
762 if self._options.restoreRNDSeeds!=
False:
764 if self._options.restoreRNDSeeds==
True:
765 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
767 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
768 if self._options.inputEventContent
or self._options.inputCommands:
769 if self._options.inputCommands:
770 self._options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 772 self._options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 776 if self._options.inputEventContent:
778 def dropSecondDropStar(iec):
789 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
790 for evct
in self._options.inputEventContent.split(
','):
791 if evct==
'':
continue 792 theEventContent = getattr(self.
process, evct+
"EventContent")
793 if hasattr(theEventContent,
'outputCommands'):
794 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
795 if hasattr(theEventContent,
'inputCommands'):
796 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
798 dropSecondDropStar(self.process.source.inputCommands)
800 if not self._options.dropDescendant:
801 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
807 """Add conditions to the process""" 808 if not self._options.conditions:
return 810 if 'FrontierConditions_GlobalTag' in self._options.conditions:
811 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
812 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
816 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
817 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
818 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
822 """Include the customise code """ 826 for c
in self._options.customisation_file:
827 custOpt.extend(c.split(
","))
829 for c
in self._options.customisation_file_unsch:
830 custOpt.extend(c.split(
","))
836 raise Exception(
"more than . in the specification:"+opt)
837 fileName=opt.split(
'.')[0]
838 if opt.count(
'.')==0: rest=
'customise' 840 rest=opt.split(
'.')[1]
841 if rest==
'py': rest=
'customise' 843 if fileName
in custMap:
844 custMap[fileName].extend(rest.split(
'+'))
846 custMap[fileName]=rest.split(
'+')
851 final_snippet=
'\n# customisation of the process.\n' 855 allFcn.extend(custMap[opt])
857 if allFcn.count(fcn)!=1:
858 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
862 packageName = f.replace(
".py",
"").
replace(
"/",
".")
863 __import__(packageName)
864 package = sys.modules[packageName]
867 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
869 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 870 if self._options.inline_custom:
871 for line
in file(customiseFile,
'r'): 872 if "import FWCore.ParameterSet.Config" in line:
874 final_snippet += line
876 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
877 for fcn
in custMap[f]:
878 print(
"customising the process with",fcn,
"from",f)
879 if not hasattr(package,fcn):
881 raise Exception(
"config "+f+
" has no function "+fcn)
885 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
886 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
889 final_snippet +=
'\n# End of customisation functions\n' 895 final_snippet=
'\n# Customisation from command line\n' 896 if self._options.customise_commands:
898 for com
in self._options.customise_commands.split(
'\\n'):
899 com=string.lstrip(com)
901 final_snippet +=
'\n'+com
912 if self._options.particleTable
not in defaultOptions.particleTableList:
913 print(
'Invalid particle table provided. Options are:')
914 print(defaultOptions.particleTable)
918 self.
loadAndRemember(
'SimGeneral.HepPDTESSource.'+self._options.particleTable+
'_cfi')
931 if self._options.isRepacked: self.
RAW2DIGIDefaultCFF=
"Configuration/StandardSequences/RawToDigi_DataMapper_cff" 951 if "DATAMIX" in self.stepMap.keys():
955 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 973 if self._options.fast
or (
'RAW2DIGI' in self.
stepMap and 'RECO' in self.
stepMap):
991 if not self._options.beamspot:
992 self._options.beamspot=VtxSmearedDefaultKey
995 if self._options.isMC==
True:
997 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 998 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1001 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1004 self._options.beamspot =
None 1010 if self._options.scenario==
'cosmics':
1011 self._options.pileup=
'Cosmics' 1012 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1013 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1014 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1018 if self._options.isMC==
True:
1024 if self._options.scenario==
'HeavyIons':
1025 if not self._options.beamspot:
1026 self._options.beamspot=VtxSmearedHIDefaultKey
1031 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1033 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1036 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1038 if self._options.isMC==
True:
1048 if self._options.isData:
1049 if self._options.magField==defaultOptions.magField:
1050 print(
"magnetic field option forced to: AutoFromDBCurrent")
1051 self._options.magField=
'AutoFromDBCurrent' 1052 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace(
'.',
'')+
'_cff' 1053 self.
magFieldCFF = self.magFieldCFF.replace(
"__",
'_')
1059 if self._options.fast:
1060 if 'start' in self._options.conditions.lower():
1061 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1063 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1065 def inGeometryKeys(opt):
1066 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1067 if opt
in GeometryConf:
1068 return GeometryConf[opt]
1072 geoms=self._options.geometry.split(
',')
1073 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1076 if '/' in geoms[1]
or '_cff' in geoms[1]:
1079 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1081 if (geoms[0].startswith(
'DB:')):
1086 if '/' in geoms[0]
or '_cff' in geoms[0]:
1089 simGeometry=geoms[0]
1090 if self._options.gflash==
True:
1091 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1093 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1096 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1097 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1099 if self._options.scenario==
'nocoll' or self._options.scenario==
'cosmics':
1100 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1101 self._options.beamspot=
'NoSmear' 1104 if self._options.fast:
1105 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1106 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1113 if self._options.pileup==
'default':
1114 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1115 self._options.pileup=MixingDefaultKey
1119 if self._options.isData:
1120 self._options.pileup=
None 1128 output = cms.OutputModule(
"PoolOutputModule")
1129 if stream.selectEvents.parameters_().__len__()!=0:
1130 output.SelectEvents = stream.selectEvents
1132 output.SelectEvents = cms.untracked.PSet()
1133 output.SelectEvents.SelectEvents=cms.vstring()
1134 if isinstance(stream.paths,tuple):
1135 for path
in stream.paths:
1136 output.SelectEvents.SelectEvents.append(path.label())
1138 output.SelectEvents.SelectEvents.append(stream.paths.label())
1142 if isinstance(stream.content,str):
1143 evtPset=getattr(self.process,stream.content)
1144 for p
in evtPset.parameters_():
1145 setattr(output,p,getattr(evtPset,p))
1146 if not self._options.inlineEventContent:
1147 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1149 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1151 output.outputCommands = stream.content
1154 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1156 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1157 filterName = cms.untracked.string(stream.name))
1159 if self._options.filtername:
1160 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1163 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1165 if workflow
in (
"producers,full"):
1166 if isinstance(stream.paths,tuple):
1167 for path
in stream.paths:
1168 self.schedule.append(path)
1170 self.schedule.append(stream.paths)
1174 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1175 self.additionalOutputs[name] = output
1176 setattr(self.process,name,output)
1178 if workflow ==
'output':
1180 filterList = output.SelectEvents.SelectEvents
1181 for i, filter
in enumerate(filterList):
1182 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1192 if ( len(sequence.split(
'.'))==1 ):
1194 elif ( len(sequence.split(
'.'))==2 ):
1196 sequence=sequence.split(
'.')[1]
1198 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1199 print(sequence,
"not recognized")
1206 for i,s
in enumerate(seq.split(
'*')):
1208 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1210 p=getattr(self.
process,prefix)
1211 tmp = getattr(self.
process, s)
1212 if isinstance(tmp, cms.Task):
1216 self.schedule.append(getattr(self.
process,prefix))
1222 self.conditionalPaths.append(prefix)
1223 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1224 self.schedule.append(getattr(self.
process,prefix))
1226 for i,s
in enumerate(seq.split(
'+')):
1228 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1229 self.schedule.append(getattr(self.
process,sn))
1243 """ Enrich the process with alca streams """ 1245 sequence = sequence.split(
'.')[-1]
1248 alcaList = sequence.split(
"+")
1250 from Configuration.AlCa.autoAlca
import autoAlca
1254 for name
in alcaConfig.__dict__:
1255 alcastream = getattr(alcaConfig,name)
1256 shortName = name.replace(
'ALCARECOStream',
'')
1257 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1258 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1259 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1260 self.AlCaPaths.append(shortName)
1261 if 'DQM' in alcaList:
1262 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1263 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1265 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1268 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1269 if isinstance(alcastream.paths,tuple):
1270 for path
in alcastream.paths:
1275 for i
in range(alcaList.count(shortName)):
1276 alcaList.remove(shortName)
1279 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1280 path = getattr(alcaConfig,name)
1281 self.schedule.append(path)
1282 alcaList.remove(
'DQM')
1284 if isinstance(alcastream,cms.Path):
1286 self.blacklist_paths.append(alcastream)
1289 if len(alcaList) != 0:
1291 for name
in alcaConfig.__dict__:
1292 alcastream = getattr(alcaConfig,name)
1293 if isinstance(alcastream,cms.FilteredStream):
1294 available.append(name.replace(
'ALCARECOStream',
''))
1295 print(
"The following alcas could not be found "+
str(alcaList))
1296 print(
"available ",available)
1298 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1303 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1304 print(
"Loading lhe fragment from",loadFragment)
1305 __import__(loadFragment)
1306 self.process.load(loadFragment)
1308 self._options.inlineObjets+=
','+sequence
1310 getattr(self.process,sequence).nEvents =
int(self._options.number)
1313 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1314 self.excludedPaths.append(
"lhe_step")
1315 self.schedule.append( self.process.lhe_step )
1318 """ load the fragment of generator configuration """ 1323 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1325 if not '/' in loadFragment:
1326 loadFragment=
'Configuration.Generator.'+loadFragment
1328 loadFragment=loadFragment.replace(
'/',
'.')
1330 print(
"Loading generator fragment from",loadFragment)
1331 __import__(loadFragment)
1335 if not (self._options.filein
or self._options.dasquery):
1336 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1339 generatorModule=sys.modules[loadFragment]
1340 genModules=generatorModule.__dict__
1346 if self._options.hideGen:
1349 self.process.load(loadFragment)
1351 import FWCore.ParameterSet.Modules
as cmstypes
1352 for name
in genModules:
1353 theObject = getattr(generatorModule,name)
1354 if isinstance(theObject, cmstypes._Module):
1355 self._options.inlineObjets=name+
','+self._options.inlineObjets
1356 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1357 self._options.inlineObjets+=
','+name
1359 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1360 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1362 elif 'generator' in genModules:
1365 """ Enrich the schedule with the rest of the generation step """ 1367 genSeqName=sequence.split(
'.')[-1]
1371 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1372 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1375 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1377 if self._options.scenario ==
'HeavyIons':
1378 if self._options.pileup==
'HiMixGEN':
1379 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1381 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1383 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1384 self.schedule.append(self.process.generation_step)
1387 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1393 """ Enrich the schedule with the summary of the filter step """ 1400 """ Enrich the schedule with the simulation step""" 1402 if not self._options.fast:
1403 if self._options.gflash==
True:
1406 if self._options.magField==
'0T':
1409 if self._options.magField==
'0T':
1410 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1416 """ Enrich the schedule with the digitisation step""" 1419 if self._options.gflash==
True:
1420 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1422 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1423 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1425 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource':
1426 if self._options.inputEventContent==
'':
1427 self._options.inputEventContent=
'REGEN' 1429 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1436 """ Enrich the schedule with the crossing frame writer step""" 1442 """ Enrich the schedule with the digitisation step""" 1446 if self._options.pileup_input:
1448 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1449 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1450 elif self._options.pileup_input.startswith(
"filelist:"):
1451 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1453 theFiles=self._options.pileup_input.split(
',')
1455 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1470 """ Enrich the schedule with the L1 simulation step""" 1471 assert(sequence ==
None)
1477 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1478 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1479 if sequence
in supported:
1480 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1481 if self._options.scenario ==
'HeavyIons':
1485 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1490 """ Enrich the schedule with the HLT simulation step""" 1492 print(
"no specification of the hlt menu has been given, should never happen")
1493 raise Exception(
'no HLT sequence provided')
1497 from Configuration.HLT.autoHLT
import autoHLT
1500 sequence = autoHLT[key]
1502 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1508 if self._options.scenario ==
'HeavyIons':
1509 optionsForHLT[
'type'] =
'HIon' 1511 optionsForHLT[
'type'] =
'GRun' 1512 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1513 if sequence ==
'run,fromSource':
1514 if hasattr(self.process.source,
'firstRun'):
1515 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1516 elif hasattr(self.process.source,
'setRunNumber'):
1517 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1519 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1521 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1525 if self._options.isMC:
1526 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1528 if self._options.name !=
'HLT':
1529 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1530 self.additionalCommands.append(
'process = ProcessName(process)')
1531 self.additionalCommands.append(
'')
1532 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1535 self.schedule.append(self.process.HLTSchedule)
1536 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1539 if self._options.fast:
1540 if not hasattr(self.
process,
'HLTEndSequence'):
1541 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1546 seqReco=sequence.split(
',')[1]
1547 seqDigi=sequence.split(
',')[0]
1549 print(
"RAW2RECO requires two specifications",sequence,
"insufficient")
1563 self.
loadAndRemember(
"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1565 for filt
in allMetFilterPaths:
1566 self.schedule.append(getattr(self.
process,
'Flag_'+filt))
1569 ''' Enrich the schedule with L1 HW validation ''' 1572 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1576 ''' Enrich the schedule with L1 reconstruction ''' 1582 ''' Enrich the schedule with L1 reconstruction ''' 1588 ''' Enrich the schedule with a user defined filter sequence ''' 1590 filterConfig=self.
load(sequence.split(
'.')[0])
1591 filterSeq=sequence.split(
'.')[-1]
1593 class PrintAllModules(
object):
1597 def enter(self,visitee):
1599 label=visitee.label()
1604 def leave(self,v):
pass 1606 expander=PrintAllModules()
1608 self._options.inlineObjets+=
','+expander.inliner
1609 self._options.inlineObjets+=
','+filterSeq
1620 ''' Enrich the schedule with reconstruction ''' 1626 ''' Enrich the schedule with reconstruction ''' 1632 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1633 if not self._options.fast:
1634 print(
"ERROR: this step is only implemented for FastSim")
1637 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1641 ''' Enrich the schedule with PAT ''' 1644 self.labelsToAssociate.append(
'patTask')
1645 if not self._options.runUnscheduled:
1646 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1647 if self._options.isData:
1648 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1650 if self._options.fast:
1651 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1653 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1655 if self._options.hltProcess:
1656 if len(self._options.customise_commands) > 1:
1657 self._options.customise_commands = self._options.customise_commands +
" \n" 1658 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1659 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1660 self._options.customise_commands = self._options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1667 ''' Enrich the schedule with PATGEN ''' 1669 self.labelsToAssociate.append(
'patGENTask')
1670 if not self._options.runUnscheduled:
1671 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1672 if self._options.isData:
1673 raise Exception(
"PATGEN step can only run on MC")
1677 ''' Enrich the schedule with NANO ''' 1680 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1681 if self._options.runUnscheduled:
1682 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1684 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1685 if self._options.hltProcess:
1686 if len(self._options.customise_commands) > 1:
1687 self._options.customise_commands = self._options.customise_commands +
" \n" 1688 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1692 ''' Enrich the schedule with event interpretation ''' 1693 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1694 if sequence
in EventInterpretation:
1696 sequence =
'EIsequence' 1698 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1704 ''' Enrich the schedule with skimming fragments''' 1706 sequence = sequence.split(
'.')[-1]
1708 skimlist=sequence.split(
'+')
1710 from Configuration.Skimming.autoSkim
import autoSkim
1714 for skim
in skimConfig.__dict__:
1715 skimstream = getattr(skimConfig,skim)
1716 if isinstance(skimstream,cms.Path):
1718 self.blacklist_paths.append(skimstream)
1719 if (
not isinstance(skimstream,cms.FilteredStream)):
1721 shortname = skim.replace(
'SKIMStream',
'')
1722 if (sequence==
"all"):
1724 elif (shortname
in skimlist):
1727 if self._options.datatier==
'DQM':
1729 skimstreamDQM = cms.FilteredStream(
1730 responsible = skimstream.responsible,
1731 name = skimstream.name+
'DQM',
1732 paths = skimstream.paths,
1733 selectEvents = skimstream.selectEvents,
1734 content = self._options.datatier+
'EventContent',
1735 dataTier = cms.untracked.string(self._options.datatier)
1738 for i
in range(skimlist.count(shortname)):
1739 skimlist.remove(shortname)
1743 if (skimlist.__len__()!=0
and sequence!=
"all"):
1744 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1745 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1748 ''' Enrich the schedule with a user defined sequence ''' 1754 """ Enrich the schedule with the postreco step """ 1761 print(sequence,
"in preparing validation")
1763 from Validation.Configuration.autoValidation
import autoValidation
1765 sequence=sequence.split(
'.')[-1]
1766 if sequence.find(
',')!=-1:
1767 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1768 valSeqName=sequence.split(
',')[1].
split(
'+')
1773 prevalSeqName=sequence.split(
'+')
1774 valSeqName=sequence.split(
'+')
1780 postfix=
'_'+sequence
1781 prevalSeqName=[
'prevalidation'+postfix]
1782 valSeqName=[
'validation'+postfix]
1783 if not hasattr(self.
process,valSeqName[0]):
1785 valSeqName=[sequence]
1796 if (
'HLT' in self.
stepMap and not self._options.fast)
or self._options.hltProcess:
1797 for s
in valSeqName+prevalSeqName:
1800 for (i,s)
in enumerate(prevalSeqName):
1802 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1803 self.schedule.append(getattr(self.
process,
'prevalidation_step%s'%NFI(i)))
1805 for (i,s)
in enumerate(valSeqName):
1806 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1807 self.schedule.append(getattr(self.
process,
'validation_step%s'%NFI(i)))
1813 if not 'DIGI' in self.
stepMap and not self._options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1814 if self._options.restoreRNDSeeds==
False and not self._options.restoreRNDSeeds==
True:
1815 self._options.restoreRNDSeeds=
True 1817 if not 'DIGI' in self.
stepMap and not self._options.fast:
1821 self._options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1823 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1825 for (i,s)
in enumerate(valSeqName):
1826 getattr(self.
process,
'validation_step%s'%NFI(i)).
insert(0, self.process.genstepfilter)
1832 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1833 It will climb down within PSets, VPSets and VInputTags to find its target""" 1834 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1841 if isinstance(pset, cms._Parameterizable):
1842 for name
in pset.parameters_().
keys():
1848 value = getattr(pset,name)
1849 type = value.pythonTypeName()
1850 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1851 self.
doIt(value,base+
"."+name)
1852 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1853 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1854 elif type
in (
'cms.string',
'cms.untracked.string'):
1858 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1859 for (i,n)
in enumerate(value):
1860 if not isinstance(n, cms.InputTag):
1867 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1868 for (i,n)
in enumerate(value):
1871 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1874 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1879 label = visitee.label()
1880 except AttributeError:
1881 label =
'<Module not in a Process>' 1883 label =
'other execption' 1884 self.
doIt(visitee, label)
1891 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1894 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1896 self.additionalCommands.append(loadMe)
1897 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1901 if self._options.hltProcess:
1902 proc=self._options.hltProcess
1904 proc=self.process.name_()
1905 if proc==HLTprocess:
return 1907 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1909 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1910 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1911 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1917 while '@' in repr(seqList)
and level<maxLevel:
1919 for specifiedCommand
in seqList:
1920 if specifiedCommand.startswith(
'@'):
1921 location=specifiedCommand[1:]
1922 if not location
in mapping:
1923 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1924 mappedTo=mapping[location]
1926 mappedTo=mappedTo[index]
1927 seqList.remove(specifiedCommand)
1928 seqList.extend(mappedTo.split(
'+'))
1931 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1937 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1938 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1939 from DQMOffline.Configuration.autoDQM
import autoDQM
1943 if len(set(sequenceList))!=len(sequenceList):
1944 sequenceList=
list(set(sequenceList))
1945 print(
"Duplicate entries for DQM:, using",sequenceList)
1947 pathName=
'dqmoffline_step' 1948 for (i,sequence)
in enumerate(sequenceList):
1950 pathName=
'dqmoffline_%d_step'%(i)
1952 if 'HLT' in self.stepMap.keys()
or self._options.hltProcess:
1955 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,sequence ) ) )
1956 self.schedule.append(getattr(self.
process,pathName))
1958 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1960 getattr(self.
process,pathName).
insert(0,self.process.genstepfilter)
1962 pathName=
'dqmofflineOnPAT_step' 1963 for (i,sequence)
in enumerate(postSequenceList):
1965 pathName=
'dqmofflineOnPAT_%d_step'%(i)
1967 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, sequence ) ) )
1968 self.schedule.append(getattr(self.
process,pathName))
1971 """ Enrich the process with harvesting step """ 1972 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1976 sequence = sequence.split(
'.')[-1]
1979 harvestingList = sequence.split(
"+")
1980 from DQMOffline.Configuration.autoDQM
import autoDQM
1981 from Validation.Configuration.autoValidation
import autoValidation
1983 combined_mapping = copy.deepcopy( autoDQM )
1984 combined_mapping.update( autoValidation )
1985 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1987 if len(set(harvestingList))!=len(harvestingList):
1988 harvestingList=
list(set(harvestingList))
1989 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1991 for name
in harvestingList:
1992 if not name
in harvestingConfig.__dict__:
1993 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
1995 harvestingstream = getattr(harvestingConfig,name)
1996 if isinstance(harvestingstream,cms.Path):
1997 self.schedule.append(harvestingstream)
1998 self.blacklist_paths.append(harvestingstream)
1999 if isinstance(harvestingstream,cms.Sequence):
2000 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2001 self.schedule.append(getattr(self.
process,name+
"_step"))
2007 """ Enrich the process with AlCaHarvesting step """ 2009 sequence=sequence.split(
".")[-1]
2012 harvestingList = sequence.split(
"+")
2016 from Configuration.AlCa.autoPCL
import autoPCL
2019 for name
in harvestingConfig.__dict__:
2020 harvestingstream = getattr(harvestingConfig,name)
2021 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2022 self.schedule.append(harvestingstream)
2023 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2024 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2025 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2026 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2028 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2029 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2030 harvestingList.remove(name)
2032 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2033 self.schedule.append(lastStep)
2035 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2036 print(
"The following harvesting could not be found : ", harvestingList)
2037 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2047 self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2048 self.schedule.append(self.process.reconstruction)
2052 """ Add useful info for the production. """ 2053 self.process.configurationMetadata=cms.untracked.PSet\
2054 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2055 name=cms.untracked.string(
"Applications"),
2056 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2059 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2064 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2065 self.
pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2066 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2071 modifierImports=[
'from Configuration.StandardSequences.Eras import eras']
2073 if hasattr(self.
_options,
"era")
and self._options.era :
2075 from Configuration.StandardSequences.Eras
import eras
2076 for requestedEra
in self._options.era.split(
",") :
2077 modifierStrings.append(
"eras."+requestedEra)
2078 modifiers.append(getattr(eras,requestedEra))
2081 if hasattr(self.
_options,
"procModifiers")
and self._options.procModifiers:
2084 for pm
in self._options.procModifiers.split(
','):
2085 modifierStrings.append(pm)
2086 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2087 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2090 self.
pythonCfgCode +=
"process = cms.Process('"+self._options.name+
"'" 2093 if len(modifierStrings)>0:
2100 if len(modifiers)>0:
2101 self.
process = cms.Process(self._options.name,*modifiers)
2103 self.
process = cms.Process(self._options.name)
2109 """ Prepare the configuration string and add missing pieces.""" 2121 outputModuleCfgCode=
"" 2122 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2127 self.
pythonCfgCode +=
"# import of standard configurations\n" 2132 if not hasattr(self.
process,
"configurationMetadata"):
2136 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2151 nl=sorted(self.additionalOutputs.keys())
2154 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2155 tmpOut = cms.EndPath(output)
2156 setattr(self.
process,name+
'OutPath',tmpOut)
2157 self.schedule.append(tmpOut)
2165 for object
in self._options.inlineObjets.split(
','):
2168 if not hasattr(self.
process,object):
2169 print(
'cannot inline -'+object+
'- : not known')
2176 for path
in self.process.paths:
2180 for endpath
in self.process.endpaths:
2186 result =
"process.schedule = cms.Schedule(" 2189 self.process.schedule = cms.Schedule()
2191 if not isinstance(item, cms.Schedule):
2192 self.process.schedule.append(item)
2194 self.process.schedule.extend(item)
2196 if hasattr(self.
process,
"HLTSchedule"):
2197 beforeHLT = self.
schedule[:self.schedule.index(self.process.HLTSchedule)]
2198 afterHLT = self.
schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2199 pathNames = [
'process.'+p.label_()
for p
in beforeHLT]
2200 result +=
','.
join(pathNames)+
')\n' 2201 result +=
'process.schedule.extend(process.HLTSchedule)\n' 2202 pathNames = [
'process.'+p.label_()
for p
in afterHLT]
2203 result +=
'process.schedule.extend(['+
','.
join(pathNames)+
'])\n' 2205 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2206 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2211 self.process.schedule.associate(getattr(self.
process, labelToAssociate))
2212 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2216 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2219 if self._options.nThreads
is not "1":
2222 self.
pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2223 self.
pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2225 if self._options.isRepacked:
2227 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2228 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2229 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2233 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2241 for path
in self.process.paths:
2250 if self._options.runUnscheduled:
2253 self.
pythonCfgCode+=
"#do not add changes to your config after this point (unless you know what you are doing)\n" 2254 self.
pythonCfgCode+=
"from FWCore.ParameterSet.Utilities import convertToUnscheduled\n" 2255 self.
pythonCfgCode+=
"process=convertToUnscheduled(process)\n" 2257 from FWCore.ParameterSet.Utilities
import convertToUnscheduled
2264 if hasattr(self.
process,
"logErrorHarvester"):
2266 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2267 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2268 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2269 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2276 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2277 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2278 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2280 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2286 if self._options.io:
2288 if not self._options.io.endswith(
'.io'): self._option.io+=
'.io' 2289 io=open(self._options.io,
'w')
2291 if hasattr(self.process.source,
"fileNames"):
2292 if len(self.process.source.fileNames.value()):
2293 ioJson[
'primary']=self.process.source.fileNames.value()
2294 if hasattr(self.process.source,
"secondaryFileNames"):
2295 if len(self.process.source.secondaryFileNames.value()):
2296 ioJson[
'secondary']=self.process.source.secondaryFileNames.value()
2297 if self._options.pileup_input
and (self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:')):
2298 ioJson[
'pileup']=self._options.pileup_input[4:]
2299 for (o,om)
in self.process.outputModules_().
items():
2300 ioJson[o]=om.fileName.value()
2301 ioJson[
'GT']=self.process.GlobalTag.globaltag.value()
2305 io.write(json.dumps(ioJson))
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
S & print(S &os, JobReport::InputFile const &f)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule