3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
13 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
18 from subprocess
import Popen,PIPE
19 import FWCore.ParameterSet.DictTypes
as DictTypes
25 defaultOptions.datamix =
'DataOnSim' 26 defaultOptions.isMC=
False 27 defaultOptions.isData=
True 28 defaultOptions.step=
'' 29 defaultOptions.pileup=
'NoPileUp' 30 defaultOptions.pileup_input =
None 31 defaultOptions.pileup_dasoption =
'' 32 defaultOptions.geometry =
'SimDB' 33 defaultOptions.geometryExtendedOptions = [
'ExtendedGFlash',
'Extended',
'NoCastor']
34 defaultOptions.magField =
'' 35 defaultOptions.conditions =
None 36 defaultOptions.scenarioOptions=[
'pp',
'cosmics',
'nocoll',
'HeavyIons']
37 defaultOptions.harvesting=
'AtRunEnd' 38 defaultOptions.gflash =
False 39 defaultOptions.number = -1
40 defaultOptions.number_out =
None 41 defaultOptions.arguments =
"" 42 defaultOptions.name =
"NO NAME GIVEN" 43 defaultOptions.evt_type =
"" 44 defaultOptions.filein =
"" 45 defaultOptions.dasquery=
"" 46 defaultOptions.dasoption=
"" 47 defaultOptions.secondfilein =
"" 48 defaultOptions.customisation_file = []
49 defaultOptions.customisation_file_unsch = []
50 defaultOptions.customise_commands =
"" 51 defaultOptions.inline_custom=
False 52 defaultOptions.particleTable =
'pythiapdt' 53 defaultOptions.particleTableList = [
'pythiapdt',
'pdt']
54 defaultOptions.dirin =
'' 55 defaultOptions.dirout =
'' 56 defaultOptions.filetype =
'EDM' 57 defaultOptions.fileout =
'output.root' 58 defaultOptions.filtername =
'' 59 defaultOptions.lazy_download =
False 60 defaultOptions.custom_conditions =
'' 61 defaultOptions.hltProcess =
'' 62 defaultOptions.eventcontent =
None 63 defaultOptions.datatier =
None 64 defaultOptions.inlineEventContent =
True 65 defaultOptions.inlineObjets =
'' 66 defaultOptions.hideGen=
False 67 from Configuration.StandardSequences.VtxSmeared
import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
68 defaultOptions.beamspot=
None 69 defaultOptions.outputDefinition =
'' 70 defaultOptions.inputCommands =
None 71 defaultOptions.outputCommands =
None 72 defaultOptions.inputEventContent =
'' 73 defaultOptions.dropDescendant =
False 74 defaultOptions.relval =
None 75 defaultOptions.profile =
None 76 defaultOptions.isRepacked =
False 77 defaultOptions.restoreRNDSeeds =
False 78 defaultOptions.donotDropOnInput =
'' 79 defaultOptions.python_filename =
'' 80 defaultOptions.io=
None 81 defaultOptions.lumiToProcess=
None 82 defaultOptions.fast=
False 83 defaultOptions.runsAndWeightsForMC =
None 84 defaultOptions.runsScenarioForMC =
None 85 defaultOptions.runUnscheduled =
False 86 defaultOptions.timeoutOutput =
False 87 defaultOptions.nThreads =
'1' 91 theObject = getattr(process,name)
92 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
93 return "process."+name+
" = " + theObject.dumpPython(
"process")
94 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
95 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 97 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 100 import FWCore.ParameterSet.Config
as cms
103 for line
in open(fileName,
'r'): 104 if line.count(
".root")>=2:
106 entries=line.replace(
"\n",
"").
split()
107 if not entries[0]
in prim:
108 prim.append(entries[0])
109 if not entries[1]
in sec:
110 sec.append(entries[1])
111 elif (line.find(
".root")!=-1):
112 entry=line.replace(
"\n",
"")
113 if not entry
in prim:
116 if not hasattr(s,
"fileNames"):
117 s.fileNames=cms.untracked.vstring(prim)
119 s.fileNames.extend(prim)
121 if not hasattr(s,
"secondaryFileNames"):
122 s.secondaryFileNames=cms.untracked.vstring(sec)
124 s.secondaryFileNames.extend(sec)
125 print(
"found files: ",prim)
127 raise Exception(
"There are not files in input from the file list")
129 print(
"found parent files:",sec)
134 import FWCore.ParameterSet.Config
as cms
137 print(
"the query is",query)
140 while eC!=0
and count<3:
142 print(
'Sleeping, then retrying DAS')
144 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True)
146 tupleP = os.waitpid(p.pid, 0)
150 print(
"DAS succeeded after",count,
"attempts",eC)
152 print(
"DAS failed 3 times- I give up")
153 for line
in pipe.split(
'\n'):
154 if line.count(
".root")>=2:
156 entries=line.replace(
"\n",
"").
split()
157 if not entries[0]
in prim:
158 prim.append(entries[0])
159 if not entries[1]
in sec:
160 sec.append(entries[1])
161 elif (line.find(
".root")!=-1):
162 entry=line.replace(
"\n",
"")
163 if not entry
in prim:
166 if not hasattr(s,
"fileNames"):
167 s.fileNames=cms.untracked.vstring(prim)
169 s.fileNames.extend(prim)
171 if not hasattr(s,
"secondaryFileNames"):
172 s.secondaryFileNames=cms.untracked.vstring(sec)
174 s.secondaryFileNames.extend(sec)
175 print(
"found files: ",prim)
177 print(
"found parent files:",sec)
180 def anyOf(listOfKeys,dict,opt=None):
189 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
192 """The main building routines """ 194 def __init__(self, options, process = None, with_output = False, with_input = False ):
195 """options taken from old cmsDriver and optparse """ 197 options.outfile_name = options.dirout+options.fileout
201 if self._options.isData
and options.isMC:
202 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
207 if 'ENDJOB' in self._options.step:
208 if (hasattr(self.
_options,
"outputDefinition")
and \
209 self._options.outputDefinition !=
'' and \
210 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
211 (hasattr(self.
_options,
"datatier")
and \
212 self._options.datatier
and \
213 'DQMIO' in self._options.datatier):
214 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
215 self._options.step=self._options.step.replace(
',ENDJOB',
'')
220 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
223 for step
in self._options.step.split(
","):
224 if step==
'':
continue 225 stepParts = step.split(
":")
226 stepName = stepParts[0]
227 if stepName
not in stepList
and not stepName.startswith(
're'):
228 raise ValueError(
"Step "+stepName+
" unknown")
229 if len(stepParts)==1:
231 elif len(stepParts)==2:
233 elif len(stepParts)==3:
234 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
236 raise ValueError(
"Step definition "+step+
" invalid")
237 self.stepKeys.append(stepName)
244 if hasattr(self.
_options,
"no_output_flag")
and self._options.no_output_flag:
271 Function to add the igprof profile service so that you can dump in the middle 274 profileOpts = self._options.profile.split(
':')
276 profilerInterval = 100
277 profilerFormat =
None 278 profilerJobFormat =
None 284 startEvent = profileOpts.pop(0)
285 if not startEvent.isdigit():
286 raise Exception(
"%s is not a number" % startEvent)
287 profilerStart =
int(startEvent)
289 eventInterval = profileOpts.pop(0)
290 if not eventInterval.isdigit():
291 raise Exception(
"%s is not a number" % eventInterval)
292 profilerInterval =
int(eventInterval)
294 profilerFormat = profileOpts.pop(0)
297 if not profilerFormat:
298 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
300 self._options.pileup,
301 self._options.conditions,
302 self._options.datatier,
303 self._options.profileTypeLabel)
304 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
305 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
306 elif not profilerJobFormat:
307 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 309 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
def load(self, includeFile):
    """Load a configuration fragment into the process and return its module.

    *includeFile* is given in path form (e.g. 'Pkg/Sub/file_cff'); it is
    converted to dotted-module form before loading.
    NOTE(review): the original 'def' line was lost in this garbled paste; the
    signature is inferred from the body — confirm against upstream.
    """
    # path form -> dotted module name expected by process.load()
    includeFile = includeFile.replace('/', '.')
    self.process.load(includeFile)
    # process.load() imports the module as a side effect, so it is in sys.modules
    return sys.modules[includeFile]
317 """helper routine to load am memorize imports""" 320 includeFile = includeFile.replace(
'/',
'.')
321 self.imports.append(includeFile)
322 self.process.load(includeFile)
323 return sys.modules[includeFile]
326 """helper routine to remember replace statements""" 327 self.additionalCommands.append(command)
328 if not command.strip().startswith(
"#"):
331 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
335 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
336 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
338 self.process.options = cms.untracked.PSet( )
340 self.addedObjects.append((
"",
"options"))
342 if self._options.lazy_download:
343 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
344 stats = cms.untracked.bool(
True),
345 enable = cms.untracked.bool(
True),
346 cacheHint = cms.untracked.string(
"lazy-download"),
347 readHint = cms.untracked.string(
"read-ahead-buffered")
349 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
354 if self._options.profile:
356 self.process.IgProfService = cms.Service(
"IgProfService",
357 reportFirstEvent = cms.untracked.int32(start),
358 reportEventInterval = cms.untracked.int32(interval),
359 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
360 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
361 self.addedObjects.append((
"Setup IGProf Service for profiling",
"IgProfService"))
364 """Here we decide how many evts will be processed""" 365 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
366 if self._options.number_out:
367 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
368 self.addedObjects.append((
"",
"maxEvents"))
371 """Here the source is built. Priority: file, generator""" 372 self.addedObjects.append((
"Input source",
"source"))
374 def filesFromOption(self):
375 for entry
in self._options.filein.split(
','):
377 if entry.startswith(
"filelist:"):
379 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
380 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
382 self.process.source.fileNames.append(self._options.dirin+entry)
383 if self._options.secondfilein:
384 if not hasattr(self.process.source,
"secondaryFileNames"):
385 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
386 for entry
in self._options.secondfilein.split(
','):
388 if entry.startswith(
"filelist:"):
389 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
390 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
391 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
393 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
395 if self._options.filein
or self._options.dasquery:
396 if self._options.filetype ==
"EDM":
397 self.process.source=cms.Source(
"PoolSource",
398 fileNames = cms.untracked.vstring(),
399 secondaryFileNames= cms.untracked.vstring())
400 filesFromOption(self)
401 elif self._options.filetype ==
"DAT":
402 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
403 filesFromOption(self)
404 elif self._options.filetype ==
"LHE":
405 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
406 if self._options.filein.startswith(
"lhe:"):
408 args=self._options.filein.split(
':')
410 print(
'LHE input from article ',article)
411 location=
'/store/lhe/' 413 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
414 for line
in textOfFiles:
415 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
416 self.process.source.fileNames.append(location+article+
'/'+fileName)
419 print(
'Issue to load LHE files, please check and try again.')
422 if len(self.process.source.fileNames)==0:
423 print(
'Issue with empty filename, but can pass line check')
426 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
428 filesFromOption(self)
430 elif self._options.filetype ==
"DQM":
431 self.process.source=cms.Source(
"DQMRootSource",
432 fileNames = cms.untracked.vstring())
433 filesFromOption(self)
435 elif self._options.filetype ==
"DQMDAQ":
437 self.process.source=cms.Source(
"DQMStreamerReader")
440 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
441 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
443 if self._options.dasquery!=
'':
444 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
445 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
447 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
448 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
451 if 'GEN' in self.stepMap.keys():
452 if self._options.inputCommands:
453 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 455 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 457 if self.process.source
and self._options.inputCommands:
458 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
459 for command
in self._options.inputCommands.split(
','):
461 command = command.strip()
462 if command==
'':
continue 463 self.process.source.inputCommands.append(command)
464 if not self._options.dropDescendant:
465 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
467 if self._options.lumiToProcess:
468 import FWCore.PythonUtilities.LumiList
as LumiList
469 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
471 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.
stepMap or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
472 if self.process.source
is None:
473 self.process.source=cms.Source(
"EmptySource")
477 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
478 if not self._options.isMC :
479 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
480 if self._options.runsAndWeightsForMC:
483 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
484 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
485 __import__(RunsAndWeights[self._options.runsScenarioForMC])
486 self.
runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
488 self.
runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
491 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
493 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
494 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.
runsAndWeights))
499 """ Add output module to the process """ 501 if self._options.outputDefinition:
502 if self._options.datatier:
503 print(
"--datatier & --eventcontent options ignored")
506 outList = eval(self._options.outputDefinition)
507 for (id,outDefDict)
in enumerate(outList):
508 outDefDictStr=outDefDict.__str__()
509 if not isinstance(outDefDict,dict):
510 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
512 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
515 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
516 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
517 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
518 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
519 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
521 if not theModuleLabel:
522 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
524 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 526 for name
in tryNames:
527 if not hasattr(self.
process,name):
530 if not theModuleLabel:
531 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
533 defaultFileName=self._options.outfile_name
535 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
537 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
538 if not theFileName.endswith(
'.root'):
542 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
543 if theStreamType==
'DQMIO': theStreamType=
'DQM' 544 if theStreamType==
'ALL':
545 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
547 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
551 if theStreamType==
'ALCARECO' and not theFilterName:
552 theFilterName=
'StreamALCACombined' 555 CppType=
'PoolOutputModule' 556 if self._options.timeoutOutput:
557 CppType=
'TimeoutPoolOutputModule' 558 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 559 output = cms.OutputModule(CppType,
560 theEventContent.clone(),
561 fileName = cms.untracked.string(theFileName),
562 dataset = cms.untracked.PSet(
563 dataTier = cms.untracked.string(theTier),
564 filterName = cms.untracked.string(theFilterName))
566 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
567 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
568 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
569 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
571 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
574 if not hasattr(output,
'SelectEvents'):
575 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
577 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
580 if hasattr(self.
process,theModuleLabel):
581 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
583 setattr(self.
process,theModuleLabel,output)
584 outputModule=getattr(self.
process,theModuleLabel)
585 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
586 path=getattr(self.
process,theModuleLabel+
'_step')
587 self.schedule.append(path)
589 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
590 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 592 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
593 if theExtraOutputCommands:
594 if not isinstance(theExtraOutputCommands,list):
595 raise Exception(
"extra ouput command in --option must be a list of strings")
596 if hasattr(self.
process,theStreamType+
"EventContent"):
597 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
599 outputModule.outputCommands.extend(theExtraOutputCommands)
601 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
606 streamTypes=self._options.eventcontent.split(
',')
607 tiers=self._options.datatier.split(
',')
608 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
609 raise Exception(
"number of event content arguments does not match number of datatier arguments")
612 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
615 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
616 if streamType==
'':
continue 617 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue 618 if streamType==
'DQMIO': streamType=
'DQM' 619 eventContent=streamType
621 if streamType ==
"NANOEDMAOD" :
622 eventContent =
"NANOAOD" 623 elif streamType ==
"NANOEDMAODSIM" :
624 eventContent =
"NANOAODSIM" 625 theEventContent = getattr(self.
process, eventContent+
"EventContent")
627 theFileName=self._options.outfile_name
628 theFilterName=self._options.filtername
630 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
631 theFilterName=self._options.filtername
632 CppType=
'PoolOutputModule' 633 if self._options.timeoutOutput:
634 CppType=
'TimeoutPoolOutputModule' 635 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 636 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 637 output = cms.OutputModule(CppType,
639 fileName = cms.untracked.string(theFileName),
640 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
641 filterName = cms.untracked.string(theFilterName)
644 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
645 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
646 if hasattr(self.
process,
"filtering_step"):
647 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
649 if streamType==
'ALCARECO':
650 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
652 if "MINIAOD" in streamType:
653 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
656 outputModuleName=streamType+
'output' 657 setattr(self.
process,outputModuleName,output)
658 outputModule=getattr(self.
process,outputModuleName)
659 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
660 path=getattr(self.
process,outputModuleName+
'_step')
661 self.schedule.append(path)
663 if self._options.outputCommands
and streamType!=
'DQM':
664 for evct
in self._options.outputCommands.split(
','):
665 if not evct:
continue 666 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
668 if not self._options.inlineEventContent:
669 tmpstreamType=streamType
670 if "NANOEDM" in tmpstreamType :
671 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
672 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
674 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
676 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
682 Add selected standard sequences to the process 685 if self._options.pileup:
686 pileupSpec=self._options.pileup.split(
',')[0]
689 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
690 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
691 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
695 if '.' in pileupSpec:
696 mixingDict={
'file':pileupSpec}
697 elif pileupSpec.startswith(
'file:'):
698 mixingDict={
'file':pileupSpec[5:]}
701 mixingDict=copy.copy(Mixing[pileupSpec])
702 if len(self._options.pileup.split(
','))>1:
703 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(
',')+1:]))
706 if 'file:' in pileupSpec:
708 self.process.load(mixingDict[
'file'])
709 print(
"inlining mixing module configuration")
710 self._options.inlineObjets+=
',mix' 714 mixingDict.pop(
'file')
715 if not "DATAMIX" in self.stepMap.keys():
716 if self._options.pileup_input:
717 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
718 mixingDict[
'F']=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
719 elif self._options.pileup_input.startswith(
"filelist:"):
720 mixingDict[
'F']=(
filesFromList(self._options.pileup_input[9:]))[0]
722 mixingDict[
'F']=self._options.pileup_input.split(
',')
724 for command
in specialization:
726 if len(mixingDict)!=0:
727 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
734 if (
'SIM' in self.
stepMap or 'reSIM' in self.
stepMap)
and not self._options.fast:
739 print(
"Geometry option",self._options.geometry,
"unknown.")
746 stepSpec = self.
stepMap[stepName]
747 print(
"Step:", stepName,
"Spec:",stepSpec)
748 if stepName.startswith(
're'):
750 if stepName[2:]
not in self._options.donotDropOnInput:
751 self._options.inputEventContent=
'%s,%s'%(stepName.upper(),self._options.inputEventContent)
752 stepName=stepName[2:]
754 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
755 elif isinstance(stepSpec, list):
756 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
757 elif isinstance(stepSpec, tuple):
758 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
760 raise ValueError(
"Invalid step definition")
762 if self._options.restoreRNDSeeds!=
False:
764 if self._options.restoreRNDSeeds==
True:
765 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
767 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
768 if self._options.inputEventContent
or self._options.inputCommands:
769 if self._options.inputCommands:
770 self._options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 772 self._options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 776 if self._options.inputEventContent:
778 def dropSecondDropStar(iec):
789 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
790 for evct
in self._options.inputEventContent.split(
','):
791 if evct==
'':
continue 792 theEventContent = getattr(self.
process, evct+
"EventContent")
793 if hasattr(theEventContent,
'outputCommands'):
794 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
795 if hasattr(theEventContent,
'inputCommands'):
796 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
798 dropSecondDropStar(self.process.source.inputCommands)
800 if not self._options.dropDescendant:
801 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
807 """Add conditions to the process""" 808 if not self._options.conditions:
return 810 if 'FrontierConditions_GlobalTag' in self._options.conditions:
811 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
812 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
816 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
817 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
818 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
822 """Include the customise code """ 826 for c
in self._options.customisation_file:
827 custOpt.extend(c.split(
","))
829 for c
in self._options.customisation_file_unsch:
830 custOpt.extend(c.split(
","))
836 raise Exception(
"more than . in the specification:"+opt)
837 fileName=opt.split(
'.')[0]
838 if opt.count(
'.')==0: rest=
'customise' 840 rest=opt.split(
'.')[1]
841 if rest==
'py': rest=
'customise' 843 if fileName
in custMap:
844 custMap[fileName].extend(rest.split(
'+'))
846 custMap[fileName]=rest.split(
'+')
851 final_snippet=
'\n# customisation of the process.\n' 855 allFcn.extend(custMap[opt])
857 if allFcn.count(fcn)!=1:
858 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
862 packageName = f.replace(
".py",
"").
replace(
"/",
".")
863 __import__(packageName)
864 package = sys.modules[packageName]
867 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
869 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 870 if self._options.inline_custom:
871 for line
in file(customiseFile,
'r'): 872 if "import FWCore.ParameterSet.Config" in line:
874 final_snippet += line
876 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
877 for fcn
in custMap[f]:
878 print(
"customising the process with",fcn,
"from",f)
879 if not hasattr(package,fcn):
881 raise Exception(
"config "+f+
" has no function "+fcn)
885 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
886 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
889 final_snippet +=
'\n# End of customisation functions\n' 895 final_snippet=
'\n# Customisation from command line\n' 896 if self._options.customise_commands:
898 for com
in self._options.customise_commands.split(
'\\n'):
899 com=string.lstrip(com)
901 final_snippet +=
'\n'+com
912 if self._options.particleTable
not in defaultOptions.particleTableList:
913 print(
'Invalid particle table provided. Options are:')
914 print(defaultOptions.particleTable)
918 self.
loadAndRemember(
'SimGeneral.HepPDTESSource.'+self._options.particleTable+
'_cfi')
931 if self._options.isRepacked: self.
RAW2DIGIDefaultCFF=
"Configuration/StandardSequences/RawToDigi_DataMapper_cff" 951 if "DATAMIX" in self.stepMap.keys():
955 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 973 if self._options.fast
or (
'RAW2DIGI' in self.
stepMap and 'RECO' in self.
stepMap):
991 if not self._options.beamspot:
992 self._options.beamspot=VtxSmearedDefaultKey
995 if self._options.isMC==
True:
997 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 998 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1001 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1004 self._options.beamspot =
None 1010 if self._options.scenario==
'cosmics':
1011 self._options.pileup=
'Cosmics' 1012 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1013 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1014 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1018 if self._options.isMC==
True:
1024 if self._options.scenario==
'HeavyIons':
1025 if not self._options.beamspot:
1026 self._options.beamspot=VtxSmearedHIDefaultKey
1031 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1033 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1036 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1038 if self._options.isMC==
True:
1048 if self._options.isData:
1049 if self._options.magField==defaultOptions.magField:
1050 print(
"magnetic field option forced to: AutoFromDBCurrent")
1051 self._options.magField=
'AutoFromDBCurrent' 1052 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace(
'.',
'')+
'_cff' 1053 self.
magFieldCFF = self.magFieldCFF.replace(
"__",
'_')
1059 if self._options.fast:
1060 if 'start' in self._options.conditions.lower():
1061 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1063 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1065 def inGeometryKeys(opt):
1066 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1067 if opt
in GeometryConf:
1068 return GeometryConf[opt]
1072 geoms=self._options.geometry.split(
',')
1073 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1076 if '/' in geoms[1]
or '_cff' in geoms[1]:
1079 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1081 if (geoms[0].startswith(
'DB:')):
1086 if '/' in geoms[0]
or '_cff' in geoms[0]:
1089 simGeometry=geoms[0]
1090 if self._options.gflash==
True:
1091 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1093 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1096 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1097 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1099 if self._options.scenario==
'nocoll' or self._options.scenario==
'cosmics':
1100 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1101 self._options.beamspot=
'NoSmear' 1104 if self._options.fast:
1105 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1106 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1112 if self._options.pileup==
'default':
1113 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1114 self._options.pileup=MixingDefaultKey
1118 if self._options.isData:
1119 self._options.pileup=
None 1127 output = cms.OutputModule(
"PoolOutputModule")
1128 if stream.selectEvents.parameters_().__len__()!=0:
1129 output.SelectEvents = stream.selectEvents
1131 output.SelectEvents = cms.untracked.PSet()
1132 output.SelectEvents.SelectEvents=cms.vstring()
1133 if isinstance(stream.paths,tuple):
1134 for path
in stream.paths:
1135 output.SelectEvents.SelectEvents.append(path.label())
1137 output.SelectEvents.SelectEvents.append(stream.paths.label())
1141 if isinstance(stream.content,str):
1142 evtPset=getattr(self.process,stream.content)
1143 for p
in evtPset.parameters_():
1144 setattr(output,p,getattr(evtPset,p))
1145 if not self._options.inlineEventContent:
1146 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1148 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1150 output.outputCommands = stream.content
1153 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1155 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1156 filterName = cms.untracked.string(stream.name))
1158 if self._options.filtername:
1159 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1162 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1164 if workflow
in (
"producers,full"):
1165 if isinstance(stream.paths,tuple):
1166 for path
in stream.paths:
1167 self.schedule.append(path)
1169 self.schedule.append(stream.paths)
1173 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1174 self.additionalOutputs[name] = output
1175 setattr(self.process,name,output)
1177 if workflow ==
'output':
1179 filterList = output.SelectEvents.SelectEvents
1180 for i, filter
in enumerate(filterList):
1181 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1191 if ( len(sequence.split(
'.'))==1 ):
1193 elif ( len(sequence.split(
'.'))==2 ):
1195 sequence=sequence.split(
'.')[1]
1197 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1198 print(sequence,
"not recognized")
1205 for i,s
in enumerate(seq.split(
'*')):
1207 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1209 p=getattr(self.
process,prefix)
1210 tmp = getattr(self.
process, s)
1211 if isinstance(tmp, cms.Task):
1215 self.schedule.append(getattr(self.
process,prefix))
1221 self.conditionalPaths.append(prefix)
1222 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1223 self.schedule.append(getattr(self.
process,prefix))
1225 for i,s
in enumerate(seq.split(
'+')):
1227 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1228 self.schedule.append(getattr(self.
process,sn))
1242 """ Enrich the process with alca streams """ 1244 sequence = sequence.split(
'.')[-1]
1247 alcaList = sequence.split(
"+")
1249 from Configuration.AlCa.autoAlca
import autoAlca
1253 for name
in alcaConfig.__dict__:
1254 alcastream = getattr(alcaConfig,name)
1255 shortName = name.replace(
'ALCARECOStream',
'')
1256 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1257 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1258 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1259 self.AlCaPaths.append(shortName)
1260 if 'DQM' in alcaList:
1261 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1262 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1264 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1267 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1268 if isinstance(alcastream.paths,tuple):
1269 for path
in alcastream.paths:
1274 for i
in range(alcaList.count(shortName)):
1275 alcaList.remove(shortName)
1278 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1279 path = getattr(alcaConfig,name)
1280 self.schedule.append(path)
1281 alcaList.remove(
'DQM')
1283 if isinstance(alcastream,cms.Path):
1285 self.blacklist_paths.append(alcastream)
1288 if len(alcaList) != 0:
1290 for name
in alcaConfig.__dict__:
1291 alcastream = getattr(alcaConfig,name)
1292 if isinstance(alcastream,cms.FilteredStream):
1293 available.append(name.replace(
'ALCARECOStream',
''))
1294 print(
"The following alcas could not be found "+
str(alcaList))
1295 print(
"available ",available)
1297 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1302 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1303 print(
"Loading lhe fragment from",loadFragment)
1304 __import__(loadFragment)
1305 self.process.load(loadFragment)
1307 self._options.inlineObjets+=
','+sequence
1309 getattr(self.process,sequence).nEvents =
int(self._options.number)
1312 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1313 self.excludedPaths.append(
"lhe_step")
1314 self.schedule.append( self.process.lhe_step )
1317 """ load the fragment of generator configuration """ 1322 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1324 if not '/' in loadFragment:
1325 loadFragment=
'Configuration.Generator.'+loadFragment
1327 loadFragment=loadFragment.replace(
'/',
'.')
1329 print(
"Loading generator fragment from",loadFragment)
1330 __import__(loadFragment)
1334 if not (self._options.filein
or self._options.dasquery):
1335 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1338 generatorModule=sys.modules[loadFragment]
1339 genModules=generatorModule.__dict__
1345 if self._options.hideGen:
1348 self.process.load(loadFragment)
1350 import FWCore.ParameterSet.Modules
as cmstypes
1351 for name
in genModules:
1352 theObject = getattr(generatorModule,name)
1353 if isinstance(theObject, cmstypes._Module):
1354 self._options.inlineObjets=name+
','+self._options.inlineObjets
1355 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1356 self._options.inlineObjets+=
','+name
1358 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1359 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1361 elif 'generator' in genModules:
1364 """ Enrich the schedule with the rest of the generation step """ 1366 genSeqName=sequence.split(
'.')[-1]
1370 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1371 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1374 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1376 if self._options.scenario ==
'HeavyIons':
1377 if self._options.pileup==
'HiMixGEN':
1378 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1380 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1382 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1383 self.schedule.append(self.process.generation_step)
1386 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1392 """ Enrich the schedule with the summary of the filter step """ 1399 """ Enrich the schedule with the simulation step""" 1401 if not self._options.fast:
1402 if self._options.gflash==
True:
1405 if self._options.magField==
'0T':
1408 if self._options.magField==
'0T':
1409 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1415 """ Enrich the schedule with the digitisation step""" 1418 if self._options.gflash==
True:
1419 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1421 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1422 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1424 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource':
1425 if self._options.inputEventContent==
'':
1426 self._options.inputEventContent=
'REGEN' 1428 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1435 """ Enrich the schedule with the crossing frame writer step""" 1441 """ Enrich the schedule with the digitisation step""" 1445 if self._options.pileup_input:
1447 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1448 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1449 elif self._options.pileup_input.startswith(
"filelist:"):
1450 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1452 theFiles=self._options.pileup_input.split(
',')
1454 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1469 """ Enrich the schedule with the L1 simulation step""" 1470 assert(sequence ==
None)
1476 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1477 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1478 if sequence
in supported:
1479 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1480 if self._options.scenario ==
'HeavyIons':
1484 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1489 """ Enrich the schedule with the HLT simulation step""" 1491 print(
"no specification of the hlt menu has been given, should never happen")
1492 raise Exception(
'no HLT sequence provided')
1496 from Configuration.HLT.autoHLT
import autoHLT
1499 sequence = autoHLT[key]
1501 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1507 if self._options.scenario ==
'HeavyIons':
1508 optionsForHLT[
'type'] =
'HIon' 1510 optionsForHLT[
'type'] =
'GRun' 1511 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1512 if sequence ==
'run,fromSource':
1513 if hasattr(self.process.source,
'firstRun'):
1514 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1515 elif hasattr(self.process.source,
'setRunNumber'):
1516 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1518 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1520 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1524 if self._options.isMC:
1525 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1527 if self._options.name !=
'HLT':
1528 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1529 self.additionalCommands.append(
'process = ProcessName(process)')
1530 self.additionalCommands.append(
'')
1531 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1534 self.schedule.append(self.process.HLTSchedule)
1535 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1538 if self._options.fast:
1539 if not hasattr(self.
process,
'HLTEndSequence'):
1540 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1545 seqReco=sequence.split(
',')[1]
1546 seqDigi=sequence.split(
',')[0]
1548 print(
"RAW2RECO requires two specifications",sequence,
"insufficient")
1562 self.
loadAndRemember(
"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1564 for filt
in allMetFilterPaths:
1565 self.schedule.append(getattr(self.
process,
'Flag_'+filt))
1568 ''' Enrich the schedule with L1 HW validation ''' 1571 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1575 ''' Enrich the schedule with L1 reconstruction ''' 1581 ''' Enrich the schedule with L1 reconstruction ''' 1587 ''' Enrich the schedule with a user defined filter sequence ''' 1589 filterConfig=self.
load(sequence.split(
'.')[0])
1590 filterSeq=sequence.split(
'.')[-1]
1592 class PrintAllModules(
object):
1596 def enter(self,visitee):
1598 label=visitee.label()
1603 def leave(self,v):
pass 1605 expander=PrintAllModules()
1607 self._options.inlineObjets+=
','+expander.inliner
1608 self._options.inlineObjets+=
','+filterSeq
1619 ''' Enrich the schedule with reconstruction ''' 1625 ''' Enrich the schedule with reconstruction ''' 1631 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1632 if not self._options.fast:
1633 print(
"ERROR: this step is only implemented for FastSim")
1636 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1640 ''' Enrich the schedule with PAT ''' 1643 self.labelsToAssociate.append(
'patTask')
1644 if not self._options.runUnscheduled:
1645 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1646 if self._options.isData:
1647 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1649 if self._options.fast:
1650 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1652 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1654 if self._options.hltProcess:
1655 if len(self._options.customise_commands) > 1:
1656 self._options.customise_commands = self._options.customise_commands +
" \n" 1657 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1658 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1659 self._options.customise_commands = self._options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1666 ''' Enrich the schedule with PATGEN ''' 1668 self.labelsToAssociate.append(
'patGENTask')
1669 if not self._options.runUnscheduled:
1670 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1671 if self._options.isData:
1672 raise Exception(
"PATGEN step can only run on MC")
1676 ''' Enrich the schedule with NANO ''' 1679 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1680 if self._options.runUnscheduled:
1681 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1683 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1684 if self._options.hltProcess:
1685 if len(self._options.customise_commands) > 1:
1686 self._options.customise_commands = self._options.customise_commands +
" \n" 1687 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1691 ''' Enrich the schedule with event interpretation ''' 1692 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1693 if sequence
in EventInterpretation:
1695 sequence =
'EIsequence' 1697 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1703 ''' Enrich the schedule with skimming fragments''' 1705 sequence = sequence.split(
'.')[-1]
1707 skimlist=sequence.split(
'+')
1709 from Configuration.Skimming.autoSkim
import autoSkim
1713 for skim
in skimConfig.__dict__:
1714 skimstream = getattr(skimConfig,skim)
1715 if isinstance(skimstream,cms.Path):
1717 self.blacklist_paths.append(skimstream)
1718 if (
not isinstance(skimstream,cms.FilteredStream)):
1720 shortname = skim.replace(
'SKIMStream',
'')
1721 if (sequence==
"all"):
1723 elif (shortname
in skimlist):
1726 if self._options.datatier==
'DQM':
1728 skimstreamDQM = cms.FilteredStream(
1729 responsible = skimstream.responsible,
1730 name = skimstream.name+
'DQM',
1731 paths = skimstream.paths,
1732 selectEvents = skimstream.selectEvents,
1733 content = self._options.datatier+
'EventContent',
1734 dataTier = cms.untracked.string(self._options.datatier)
1737 for i
in range(skimlist.count(shortname)):
1738 skimlist.remove(shortname)
1742 if (skimlist.__len__()!=0
and sequence!=
"all"):
1743 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1744 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1747 ''' Enrich the schedule with a user defined sequence ''' 1753 """ Enrich the schedule with the postreco step """ 1760 print(sequence,
"in preparing validation")
1762 from Validation.Configuration.autoValidation
import autoValidation
1764 sequence=sequence.split(
'.')[-1]
1765 if sequence.find(
',')!=-1:
1766 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1767 valSeqName=sequence.split(
',')[1].
split(
'+')
1772 prevalSeqName=sequence.split(
'+')
1773 valSeqName=sequence.split(
'+')
1779 postfix=
'_'+sequence
1780 prevalSeqName=[
'prevalidation'+postfix]
1781 valSeqName=[
'validation'+postfix]
1782 if not hasattr(self.
process,valSeqName[0]):
1784 valSeqName=[sequence]
1795 if (
'HLT' in self.
stepMap and not self._options.fast)
or self._options.hltProcess:
1796 for s
in valSeqName+prevalSeqName:
1799 for (i,s)
in enumerate(prevalSeqName):
1801 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1802 self.schedule.append(getattr(self.
process,
'prevalidation_step%s'%NFI(i)))
1804 for (i,s)
in enumerate(valSeqName):
1805 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1806 self.schedule.append(getattr(self.
process,
'validation_step%s'%NFI(i)))
1812 if not 'DIGI' in self.
stepMap and not self._options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1813 if self._options.restoreRNDSeeds==
False and not self._options.restoreRNDSeeds==
True:
1814 self._options.restoreRNDSeeds=
True 1816 if not 'DIGI' in self.
stepMap and not self._options.fast:
1820 self._options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1822 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1824 for (i,s)
in enumerate(valSeqName):
1825 getattr(self.
process,
'validation_step%s'%NFI(i)).
insert(0, self.process.genstepfilter)
1831 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1832 It will climb down within PSets, VPSets and VInputTags to find its target""" 1833 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1840 if isinstance(pset, cms._Parameterizable):
1841 for name
in pset.parameters_().
keys():
1847 value = getattr(pset,name)
1848 type = value.pythonTypeName()
1849 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1850 self.
doIt(value,base+
"."+name)
1851 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1852 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1853 elif type
in (
'cms.string',
'cms.untracked.string'):
1857 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1858 for (i,n)
in enumerate(value):
1859 if not isinstance(n, cms.InputTag):
1866 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1867 for (i,n)
in enumerate(value):
1870 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1873 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1878 label = visitee.label()
1879 except AttributeError:
1880 label =
'<Module not in a Process>' 1882 label =
'other execption' 1883 self.
doIt(visitee, label)
1890 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1893 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1895 self.additionalCommands.append(loadMe)
1896 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1900 if self._options.hltProcess:
1901 proc=self._options.hltProcess
1903 proc=self.process.name_()
1904 if proc==HLTprocess:
return 1906 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1908 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1909 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1910 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1916 while '@' in repr(seqList)
and level<maxLevel:
1918 for specifiedCommand
in seqList:
1919 if specifiedCommand.startswith(
'@'):
1920 location=specifiedCommand[1:]
1921 if not location
in mapping:
1922 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1923 mappedTo=mapping[location]
1925 mappedTo=mappedTo[index]
1926 seqList.remove(specifiedCommand)
1927 seqList.extend(mappedTo.split(
'+'))
1930 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1936 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1937 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1938 from DQMOffline.Configuration.autoDQM
import autoDQM
1942 if len(set(sequenceList))!=len(sequenceList):
1943 sequenceList=
list(set(sequenceList))
1944 print(
"Duplicate entries for DQM:, using",sequenceList)
1946 pathName=
'dqmoffline_step' 1947 for (i,sequence)
in enumerate(sequenceList):
1949 pathName=
'dqmoffline_%d_step'%(i)
1951 if 'HLT' in self.stepMap.keys()
or self._options.hltProcess:
1954 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,sequence ) ) )
1955 self.schedule.append(getattr(self.
process,pathName))
1957 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1959 getattr(self.
process,pathName).
insert(0,self.process.genstepfilter)
1961 pathName=
'dqmofflineOnPAT_step' 1962 for (i,sequence)
in enumerate(postSequenceList):
1964 pathName=
'dqmofflineOnPAT_%d_step'%(i)
1966 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, sequence ) ) )
1967 self.schedule.append(getattr(self.
process,pathName))
1970 """ Enrich the process with harvesting step """ 1971 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1975 sequence = sequence.split(
'.')[-1]
1978 harvestingList = sequence.split(
"+")
1979 from DQMOffline.Configuration.autoDQM
import autoDQM
1980 from Validation.Configuration.autoValidation
import autoValidation
1982 combined_mapping = copy.deepcopy( autoDQM )
1983 combined_mapping.update( autoValidation )
1984 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1986 if len(set(harvestingList))!=len(harvestingList):
1987 harvestingList=
list(set(harvestingList))
1988 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1990 for name
in harvestingList:
1991 if not name
in harvestingConfig.__dict__:
1992 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
1994 harvestingstream = getattr(harvestingConfig,name)
1995 if isinstance(harvestingstream,cms.Path):
1996 self.schedule.append(harvestingstream)
1997 self.blacklist_paths.append(harvestingstream)
1998 if isinstance(harvestingstream,cms.Sequence):
1999 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2000 self.schedule.append(getattr(self.
process,name+
"_step"))
2006 """ Enrich the process with AlCaHarvesting step """ 2008 sequence=sequence.split(
".")[-1]
2011 harvestingList = sequence.split(
"+")
2015 from Configuration.AlCa.autoPCL
import autoPCL
2018 for name
in harvestingConfig.__dict__:
2019 harvestingstream = getattr(harvestingConfig,name)
2020 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2021 self.schedule.append(harvestingstream)
2022 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2023 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2024 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2025 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2027 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2028 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2029 harvestingList.remove(name)
2031 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2032 self.schedule.append(lastStep)
2034 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2035 print(
"The following harvesting could not be found : ", harvestingList)
2036 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2046 self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2047 self.schedule.append(self.process.reconstruction)
2051 """ Add useful info for the production. """ 2052 self.process.configurationMetadata=cms.untracked.PSet\
2053 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2054 name=cms.untracked.string(
"Applications"),
2055 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2058 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2063 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2064 self.
pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2065 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2070 modifierImports=[
'from Configuration.StandardSequences.Eras import eras']
2072 if hasattr(self.
_options,
"era")
and self._options.era :
2074 from Configuration.StandardSequences.Eras
import eras
2075 for requestedEra
in self._options.era.split(
",") :
2076 modifierStrings.append(
"eras."+requestedEra)
2077 modifiers.append(getattr(eras,requestedEra))
2080 if hasattr(self.
_options,
"procModifiers")
and self._options.procModifiers:
2083 for pm
in self._options.procModifiers.split(
','):
2084 modifierStrings.append(pm)
2085 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2086 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2089 self.
pythonCfgCode +=
"process = cms.Process('"+self._options.name+
"'" 2092 if len(modifierStrings)>0:
2099 if len(modifiers)>0:
2100 self.
process = cms.Process(self._options.name,*modifiers)
2102 self.
process = cms.Process(self._options.name)
2108 """ Prepare the configuration string and add missing pieces.""" 2120 outputModuleCfgCode=
"" 2121 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2126 self.
pythonCfgCode +=
"# import of standard configurations\n" 2131 if not hasattr(self.
process,
"configurationMetadata"):
2135 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2150 nl=sorted(self.additionalOutputs.keys())
2153 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2154 tmpOut = cms.EndPath(output)
2155 setattr(self.
process,name+
'OutPath',tmpOut)
2156 self.schedule.append(tmpOut)
2164 for object
in self._options.inlineObjets.split(
','):
2167 if not hasattr(self.
process,object):
2168 print(
'cannot inline -'+object+
'- : not known')
2175 for path
in self.process.paths:
2179 for endpath
in self.process.endpaths:
2185 result =
"process.schedule = cms.Schedule(" 2188 self.process.schedule = cms.Schedule()
2190 if not isinstance(item, cms.Schedule):
2191 self.process.schedule.append(item)
2193 self.process.schedule.extend(item)
2195 if hasattr(self.
process,
"HLTSchedule"):
2196 beforeHLT = self.
schedule[:self.schedule.index(self.process.HLTSchedule)]
2197 afterHLT = self.
schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2198 pathNames = [
'process.'+p.label_()
for p
in beforeHLT]
2199 result +=
','.
join(pathNames)+
')\n' 2200 result +=
'process.schedule.extend(process.HLTSchedule)\n' 2201 pathNames = [
'process.'+p.label_()
for p
in afterHLT]
2202 result +=
'process.schedule.extend(['+
','.
join(pathNames)+
'])\n' 2204 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2205 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2210 self.process.schedule.associate(getattr(self.
process, labelToAssociate))
2211 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2215 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2218 if self._options.nThreads
is not "1":
2221 self.
pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2222 self.
pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2224 if self._options.isRepacked:
2226 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2227 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2228 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2232 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2240 for path
in self.process.paths:
2249 if self._options.runUnscheduled:
2252 self.
pythonCfgCode+=
"#do not add changes to your config after this point (unless you know what you are doing)\n" 2253 self.
pythonCfgCode+=
"from FWCore.ParameterSet.Utilities import convertToUnscheduled\n" 2254 self.
pythonCfgCode+=
"process=convertToUnscheduled(process)\n" 2256 from FWCore.ParameterSet.Utilities
import convertToUnscheduled
2263 if hasattr(self.
process,
"logErrorHarvester"):
2265 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2266 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2267 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2268 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2275 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2276 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2277 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2279 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2285 if self._options.io:
2287 if not self._options.io.endswith(
'.io'): self._option.io+=
'.io' 2288 io=open(self._options.io,
'w')
2290 if hasattr(self.process.source,
"fileNames"):
2291 if len(self.process.source.fileNames.value()):
2292 ioJson[
'primary']=self.process.source.fileNames.value()
2293 if hasattr(self.process.source,
"secondaryFileNames"):
2294 if len(self.process.source.secondaryFileNames.value()):
2295 ioJson[
'secondary']=self.process.source.secondaryFileNames.value()
2296 if self._options.pileup_input
and (self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:')):
2297 ioJson[
'pileup']=self._options.pileup_input[4:]
2298 for (o,om)
in self.process.outputModules_().
items():
2299 ioJson[o]=om.fileName.value()
2300 ioJson[
'GT']=self.process.GlobalTag.globaltag.value()
2304 io.write(json.dumps(ioJson))
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
S & print(S &os, JobReport::InputFile const &f)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule