3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
13 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
18 from subprocess
import Popen,PIPE
19 import FWCore.ParameterSet.DictTypes
as DictTypes
# Default values for the cmsDriver / ConfigBuilder option set ("defaultOptions").
# NOTE(review): this SOURCE is a line-mangled extract -- the integers fused into
# each statement are the ORIGINAL file's line numbers, and gaps in that numbering
# mean original lines are missing here. Code text below is preserved byte-for-byte;
# only these comments were added. Do not treat the fragment as runnable as-is.
25 defaultOptions.datamix =
'DataOnSim' 26 defaultOptions.isMC=
False 27 defaultOptions.isData=
True 28 defaultOptions.step=
'' 29 defaultOptions.pileup=
'NoPileUp' 30 defaultOptions.pileup_input =
None 31 defaultOptions.pileup_dasoption =
'' 32 defaultOptions.geometry =
'SimDB' 33 defaultOptions.geometryExtendedOptions = [
'ExtendedGFlash',
'Extended',
'NoCastor']
34 defaultOptions.magField =
'' 35 defaultOptions.conditions =
None 36 defaultOptions.scenarioOptions=[
'pp',
'cosmics',
'nocoll',
'HeavyIons']
37 defaultOptions.harvesting=
'AtRunEnd' 38 defaultOptions.gflash =
False 39 defaultOptions.number = -1
40 defaultOptions.number_out =
None 41 defaultOptions.arguments =
"" 42 defaultOptions.name =
"NO NAME GIVEN" 43 defaultOptions.evt_type =
"" 44 defaultOptions.filein =
"" 45 defaultOptions.dasquery=
"" 46 defaultOptions.dasoption=
"" 47 defaultOptions.secondfilein =
"" 48 defaultOptions.customisation_file = []
49 defaultOptions.customisation_file_unsch = []
50 defaultOptions.customise_commands =
"" 51 defaultOptions.inline_custom=
False 52 defaultOptions.particleTable =
'pythiapdt' 53 defaultOptions.particleTableList = [
'pythiapdt',
'pdt']
54 defaultOptions.dirin =
'' 55 defaultOptions.dirout =
'' 56 defaultOptions.filetype =
'EDM' 57 defaultOptions.fileout =
'output.root' 58 defaultOptions.filtername =
'' 59 defaultOptions.lazy_download =
False 60 defaultOptions.custom_conditions =
'' 61 defaultOptions.hltProcess =
'' 62 defaultOptions.eventcontent =
None 63 defaultOptions.datatier =
None 64 defaultOptions.inlineEventContent =
True 65 defaultOptions.inlineObjets =
'' 66 defaultOptions.hideGen=
# Vertex-smearing defaults are imported mid-stream in the original file
# (original line 67) so they can seed defaultOptions.beamspot below.
False 67 from Configuration.StandardSequences.VtxSmeared
import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
68 defaultOptions.beamspot=
None 69 defaultOptions.outputDefinition =
'' 70 defaultOptions.inputCommands =
None 71 defaultOptions.outputCommands =
None 72 defaultOptions.inputEventContent =
'' 73 defaultOptions.dropDescendant =
False 74 defaultOptions.relval =
None 75 defaultOptions.profile =
None 76 defaultOptions.isRepacked =
False 77 defaultOptions.restoreRNDSeeds =
False 78 defaultOptions.donotDropOnInput =
'' 79 defaultOptions.python_filename =
'' 80 defaultOptions.io=
None 81 defaultOptions.lumiToProcess=
None 82 defaultOptions.fast=
False 83 defaultOptions.runsAndWeightsForMC =
None 84 defaultOptions.runsScenarioForMC =
None 85 defaultOptions.runUnscheduled =
False 86 defaultOptions.timeoutOutput =
False 87 defaultOptions.nThreads =
# NOTE(review): the trailing "91 theObject = ..." below is the first line of a
# separate helper whose def header (original line 90) was elided by the extraction.
'1' 91 theObject = getattr(process,name)
92 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
93 return "process."+name+
" = " + theObject.dumpPython(
"process")
94 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
95 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 97 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 100 import FWCore.ParameterSet.Config
as cms
103 for line
in open(fileName,
'r'): 104 if line.count(
".root")>=2:
106 entries=line.replace(
"\n",
"").
split()
107 if not entries[0]
in prim:
108 prim.append(entries[0])
109 if not entries[1]
in sec:
110 sec.append(entries[1])
111 elif (line.find(
".root")!=-1):
112 entry=line.replace(
"\n",
"")
113 if not entry
in prim:
116 if not hasattr(s,
"fileNames"):
117 s.fileNames=cms.untracked.vstring(prim)
119 s.fileNames.extend(prim)
121 if not hasattr(s,
"secondaryFileNames"):
122 s.secondaryFileNames=cms.untracked.vstring(sec)
124 s.secondaryFileNames.extend(sec)
125 print(
"found files: ",prim)
127 raise Exception(
"There are not files in input from the file list")
129 print(
"found parent files:",sec)
134 import FWCore.ParameterSet.Config
as cms
137 print(
"the query is",query)
140 while eC!=0
and count<3:
142 print(
'Sleeping, then retrying DAS')
144 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True)
146 tupleP = os.waitpid(p.pid, 0)
150 print(
"DAS succeeded after",count,
"attempts",eC)
152 print(
"DAS failed 3 times- I give up")
153 for line
in pipe.split(
'\n'):
154 if line.count(
".root")>=2:
156 entries=line.replace(
"\n",
"").
split()
157 if not entries[0]
in prim:
158 prim.append(entries[0])
159 if not entries[1]
in sec:
160 sec.append(entries[1])
161 elif (line.find(
".root")!=-1):
162 entry=line.replace(
"\n",
"")
163 if not entry
in prim:
166 if not hasattr(s,
"fileNames"):
167 s.fileNames=cms.untracked.vstring(prim)
169 s.fileNames.extend(prim)
171 if not hasattr(s,
"secondaryFileNames"):
172 s.secondaryFileNames=cms.untracked.vstring(sec)
174 s.secondaryFileNames.extend(sec)
175 print(
"found files: ",prim)
177 print(
"found parent files:",sec)
# anyOf(listOfKeys, dict, opt=None): look up the first of several alternative
# key spellings in an --output specification dict.
# NOTE(review): original lines 181-188 (the lookup loop and the opt-default
# return) are elided in this extract; only the failure path survives. Presumably
# the elided body returns dict[key] for the first matching key, or opt if given
# -- TODO confirm against the full file.
180 def anyOf(listOfKeys,dict,opt=None):
# Raised only when none of the alternative keys is present and no default
# (opt) was supplied: these keys are mandatory in --output dicts.
189 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
# ConfigBuilder.__init__ (fragment): validates the cmsDriver option set and
# parses the comma-separated --step specification into self.stepMap/self.stepKeys.
# NOTE(review): this extract is line-mangled (original line numbers fused into
# the text) and many original lines are elided (e.g. 196-200, 203-206, 216-219,
# 230/232/235, 238-243); code text is preserved byte-for-byte, comments only added.
192 """The main building routines """ 194 def __init__(self, options, process = None, with_output = False, with_input = False ):
195 """options taken from old cmsDriver and optparse """ 197 options.outfile_name = options.dirout+options.fileout
# --data and --mc are mutually exclusive.
201 if self._options.isData
and options.isMC:
202 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
# ENDJOB is stripped from --step when any requested output uses the DQMIO
# data tier (checked both via --output definition dicts and via --datatier).
207 if 'ENDJOB' in self._options.step:
208 if (hasattr(self.
_options,
"outputDefinition")
and \
209 self._options.outputDefinition !=
'' and \
210 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
# NOTE(review): eval() on the --output string -- trusted CLI input only.
in eval(self._options.outputDefinition)))
or \
211 (hasattr(self.
_options,
"datatier")
and \
212 self._options.datatier
and \
213 'DQMIO' in self._options.datatier):
214 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
215 self._options.step=self._options.step.replace(
',ENDJOB',
'')
# Known step names = the prepare_* methods of ConfigBuilder, prefix stripped.
220 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
# Parse each "name[:seq[:cff]]" entry of --step; "reXYZ" names are also allowed.
223 for step
in self._options.step.split(
","):
224 if step==
'':
continue 225 stepParts = step.split(
":")
226 stepName = stepParts[0]
227 if stepName
not in stepList
and not stepName.startswith(
're'):
228 raise ValueError(
"Step "+stepName+
" unknown")
# 1 part: default sequence; 2 parts: named sequence; 3 parts: (seqs, cff).
# NOTE(review): the stepMap assignments for the 1- and 2-part cases
# (original lines 230 and 232) are elided in this extract.
229 if len(stepParts)==1:
231 elif len(stepParts)==2:
233 elif len(stepParts)==3:
234 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
236 raise ValueError(
"Step definition "+step+
" invalid")
237 self.stepKeys.append(stepName)
244 if hasattr(self.
_options,
"no_output_flag")
and self._options.no_output_flag:
271 Function to add the igprof profile service so that you can dump in the middle 274 profileOpts = self._options.profile.split(
':')
276 profilerInterval = 100
277 profilerFormat =
None 278 profilerJobFormat =
None 284 startEvent = profileOpts.pop(0)
285 if not startEvent.isdigit():
286 raise Exception(
"%s is not a number" % startEvent)
287 profilerStart =
int(startEvent)
289 eventInterval = profileOpts.pop(0)
290 if not eventInterval.isdigit():
291 raise Exception(
"%s is not a number" % eventInterval)
292 profilerInterval =
int(eventInterval)
294 profilerFormat = profileOpts.pop(0)
297 if not profilerFormat:
298 profilerFormat =
"%s___%s___%s___%s___%s___%s___%%I.gz" % (self._options.evt_type.replace(
"_cfi",
""),
300 self._options.pileup,
301 self._options.conditions,
302 self._options.datatier,
303 self._options.profileTypeLabel)
304 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
305 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
306 elif not profilerJobFormat:
307 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 309 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
312 includeFile = includeFile.replace(
'/',
'.')
313 self.process.load(includeFile)
314 return sys.modules[includeFile]
317 """helper routine to load am memorize imports""" 320 includeFile = includeFile.replace(
'/',
'.')
321 self.imports.append(includeFile)
322 self.process.load(includeFile)
323 return sys.modules[includeFile]
326 """helper routine to remember replace statements""" 327 self.additionalCommands.append(command)
328 if not command.strip().startswith(
"#"):
331 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
335 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
336 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
338 self.process.options = cms.untracked.PSet( )
340 self.addedObjects.append((
"",
"options"))
342 if self._options.lazy_download:
343 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
344 stats = cms.untracked.bool(
True),
345 enable = cms.untracked.bool(
True),
346 cacheHint = cms.untracked.string(
"lazy-download"),
347 readHint = cms.untracked.string(
"read-ahead-buffered")
349 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
354 if self._options.profile:
356 self.process.IgProfService = cms.Service(
"IgProfService",
357 reportFirstEvent = cms.untracked.int32(start),
358 reportEventInterval = cms.untracked.int32(interval),
359 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
360 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
361 self.addedObjects.append((
"Setup IGProf Service for profiling",
"IgProfService"))
364 """Here we decide how many evts will be processed""" 365 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
366 if self._options.number_out:
367 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
368 self.addedObjects.append((
"",
"maxEvents"))
371 """Here the source is built. Priority: file, generator""" 372 self.addedObjects.append((
"Input source",
"source"))
374 def filesFromOption(self):
375 for entry
in self._options.filein.split(
','):
377 if entry.startswith(
"filelist:"):
379 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
380 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
382 self.process.source.fileNames.append(self._options.dirin+entry)
383 if self._options.secondfilein:
384 if not hasattr(self.process.source,
"secondaryFileNames"):
385 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
386 for entry
in self._options.secondfilein.split(
','):
388 if entry.startswith(
"filelist:"):
389 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
390 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
391 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
393 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
395 if self._options.filein
or self._options.dasquery:
396 if self._options.filetype ==
"EDM":
397 self.process.source=cms.Source(
"PoolSource",
398 fileNames = cms.untracked.vstring(),
399 secondaryFileNames= cms.untracked.vstring())
400 filesFromOption(self)
401 elif self._options.filetype ==
"DAT":
402 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
403 filesFromOption(self)
404 elif self._options.filetype ==
"LHE":
405 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
406 if self._options.filein.startswith(
"lhe:"):
408 args=self._options.filein.split(
':')
410 print(
'LHE input from article ',article)
411 location=
'/store/lhe/' 413 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
414 for line
in textOfFiles:
415 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
416 self.process.source.fileNames.append(location+article+
'/'+fileName)
419 print(
'Issue to load LHE files, please check and try again.')
422 if len(self.process.source.fileNames)==0:
423 print(
'Issue with empty filename, but can pass line check')
426 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
428 filesFromOption(self)
430 elif self._options.filetype ==
"DQM":
431 self.process.source=cms.Source(
"DQMRootSource",
432 fileNames = cms.untracked.vstring())
433 filesFromOption(self)
435 elif self._options.filetype ==
"DQMDAQ":
437 self.process.source=cms.Source(
"DQMStreamerReader")
440 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
441 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
443 if self._options.dasquery!=
'':
444 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
445 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
447 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
448 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
451 if 'GEN' in self.stepMap.keys():
452 if self._options.inputCommands:
453 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 455 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 457 if self.process.source
and self._options.inputCommands:
458 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
459 for command
in self._options.inputCommands.split(
','):
461 command = command.strip()
462 if command==
'':
continue 463 self.process.source.inputCommands.append(command)
464 if not self._options.dropDescendant:
465 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
467 if self._options.lumiToProcess:
468 import FWCore.PythonUtilities.LumiList
as LumiList
469 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
471 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.
stepMap or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
472 if self.process.source
is None:
473 self.process.source=cms.Source(
"EmptySource")
477 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
478 if not self._options.isMC :
479 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
480 if self._options.runsAndWeightsForMC:
483 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
484 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
485 __import__(RunsAndWeights[self._options.runsScenarioForMC])
486 self.
runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
488 self.
runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
491 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
493 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
494 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.
runsAndWeights))
499 """ Add output module to the process """ 501 if self._options.outputDefinition:
502 if self._options.datatier:
503 print(
"--datatier & --eventcontent options ignored")
506 outList = eval(self._options.outputDefinition)
507 for (id,outDefDict)
in enumerate(outList):
508 outDefDictStr=outDefDict.__str__()
509 if not isinstance(outDefDict,dict):
510 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
512 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
515 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
516 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
517 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
518 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
519 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
521 if not theModuleLabel:
522 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
523 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
524 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 526 for name
in tryNames:
527 if not hasattr(self.
process,name):
530 if not theModuleLabel:
531 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
533 defaultFileName=self._options.outfile_name
535 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
537 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
538 if not theFileName.endswith(
'.root'):
542 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
543 if theStreamType==
'DQMIO': theStreamType=
'DQM' 544 if theStreamType==
'ALL':
545 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
547 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
551 if theStreamType==
'ALCARECO' and not theFilterName:
552 theFilterName=
'StreamALCACombined' 555 CppType=
'PoolOutputModule' 556 if self._options.timeoutOutput:
557 CppType=
'TimeoutPoolOutputModule' 558 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 559 output = cms.OutputModule(CppType,
560 theEventContent.clone(),
561 fileName = cms.untracked.string(theFileName),
562 dataset = cms.untracked.PSet(
563 dataTier = cms.untracked.string(theTier),
564 filterName = cms.untracked.string(theFilterName))
566 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
567 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
568 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
569 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
571 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
574 if not hasattr(output,
'SelectEvents'):
575 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
577 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
580 if hasattr(self.
process,theModuleLabel):
581 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
583 setattr(self.
process,theModuleLabel,output)
584 outputModule=getattr(self.
process,theModuleLabel)
585 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
586 path=getattr(self.
process,theModuleLabel+
'_step')
587 self.schedule.append(path)
589 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
590 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 592 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
593 if theExtraOutputCommands:
594 if not isinstance(theExtraOutputCommands,list):
595 raise Exception(
"extra ouput command in --option must be a list of strings")
596 if hasattr(self.
process,theStreamType+
"EventContent"):
597 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
599 outputModule.outputCommands.extend(theExtraOutputCommands)
601 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
606 streamTypes=self._options.eventcontent.split(
',')
607 tiers=self._options.datatier.split(
',')
608 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
609 raise Exception(
"number of event content arguments does not match number of datatier arguments")
612 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
615 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
616 if streamType==
'':
continue 617 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue 618 if streamType==
'DQMIO': streamType=
'DQM' 619 eventContent=streamType
621 if streamType ==
"NANOEDMAOD" :
622 eventContent =
"NANOAOD" 623 elif streamType ==
"NANOEDMAODSIM" :
624 eventContent =
"NANOAODSIM" 625 theEventContent = getattr(self.
process, eventContent+
"EventContent")
627 theFileName=self._options.outfile_name
628 theFilterName=self._options.filtername
630 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
631 theFilterName=self._options.filtername
632 CppType=
'PoolOutputModule' 633 if self._options.timeoutOutput:
634 CppType=
'TimeoutPoolOutputModule' 635 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 636 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 637 output = cms.OutputModule(CppType,
639 fileName = cms.untracked.string(theFileName),
640 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
641 filterName = cms.untracked.string(theFilterName)
644 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
645 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
646 if hasattr(self.
process,
"filtering_step"):
647 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
649 if streamType==
'ALCARECO':
650 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
652 if "MINIAOD" in streamType:
653 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
656 outputModuleName=streamType+
'output' 657 setattr(self.
process,outputModuleName,output)
658 outputModule=getattr(self.
process,outputModuleName)
659 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
660 path=getattr(self.
process,outputModuleName+
'_step')
661 self.schedule.append(path)
663 if self._options.outputCommands
and streamType!=
'DQM':
664 for evct
in self._options.outputCommands.split(
','):
665 if not evct:
continue 666 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
668 if not self._options.inlineEventContent:
669 tmpstreamType=streamType
670 if "NANOEDM" in tmpstreamType :
671 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
672 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
674 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
676 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
682 Add selected standard sequences to the process 685 if self._options.pileup:
686 pileupSpec=self._options.pileup.split(
',')[0]
689 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
690 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
691 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
695 if '.' in pileupSpec:
696 mixingDict={
'file':pileupSpec}
697 elif pileupSpec.startswith(
'file:'):
698 mixingDict={
'file':pileupSpec[5:]}
701 mixingDict=copy.copy(Mixing[pileupSpec])
702 if len(self._options.pileup.split(
','))>1:
703 mixingDict.update(eval(self._options.pileup[self._options.pileup.find(
',')+1:]))
706 if 'file:' in pileupSpec:
708 self.process.load(mixingDict[
'file'])
709 print(
"inlining mixing module configuration")
710 self._options.inlineObjets+=
',mix' 714 mixingDict.pop(
'file')
715 if not "DATAMIX" in self.stepMap.keys():
716 if self._options.pileup_input:
717 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
718 mixingDict[
'F']=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
719 elif self._options.pileup_input.startswith(
"filelist:"):
720 mixingDict[
'F']=(
filesFromList(self._options.pileup_input[9:]))[0]
722 mixingDict[
'F']=self._options.pileup_input.split(
',')
724 for command
in specialization:
726 if len(mixingDict)!=0:
727 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
734 if (
'SIM' in self.
stepMap or 'reSIM' in self.
stepMap)
and not self._options.fast:
739 print(
"Geometry option",self._options.geometry,
"unknown.")
746 stepSpec = self.
stepMap[stepName]
747 print(
"Step:", stepName,
"Spec:",stepSpec)
748 if stepName.startswith(
're'):
750 if stepName[2:]
not in self._options.donotDropOnInput:
751 self._options.inputEventContent=
'%s,%s'%(stepName.upper(),self._options.inputEventContent)
752 stepName=stepName[2:]
754 getattr(self,
"prepare_"+stepName)(sequence = getattr(self,stepName+
"DefaultSeq"))
755 elif isinstance(stepSpec, list):
756 getattr(self,
"prepare_"+stepName)(sequence =
'+'.
join(stepSpec))
757 elif isinstance(stepSpec, tuple):
758 getattr(self,
"prepare_"+stepName)(sequence =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
760 raise ValueError(
"Invalid step definition")
762 if self._options.restoreRNDSeeds!=
False:
764 if self._options.restoreRNDSeeds==
True:
765 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
767 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
768 if self._options.inputEventContent
or self._options.inputCommands:
769 if self._options.inputCommands:
770 self._options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 772 self._options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 776 if self._options.inputEventContent:
778 def dropSecondDropStar(iec):
789 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
790 for evct
in self._options.inputEventContent.split(
','):
791 if evct==
'':
continue 792 theEventContent = getattr(self.
process, evct+
"EventContent")
793 if hasattr(theEventContent,
'outputCommands'):
794 self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
795 if hasattr(theEventContent,
'inputCommands'):
796 self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
798 dropSecondDropStar(self.process.source.inputCommands)
800 if not self._options.dropDescendant:
801 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
807 """Add conditions to the process""" 808 if not self._options.conditions:
return 810 if 'FrontierConditions_GlobalTag' in self._options.conditions:
811 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
812 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
816 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
817 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
818 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
822 """Include the customise code """ 826 for c
in self._options.customisation_file:
827 custOpt.extend(c.split(
","))
829 for c
in self._options.customisation_file_unsch:
830 custOpt.extend(c.split(
","))
836 raise Exception(
"more than . in the specification:"+opt)
837 fileName=opt.split(
'.')[0]
838 if opt.count(
'.')==0: rest=
'customise' 840 rest=opt.split(
'.')[1]
841 if rest==
'py': rest=
'customise' 843 if fileName
in custMap:
844 custMap[fileName].extend(rest.split(
'+'))
846 custMap[fileName]=rest.split(
'+')
851 final_snippet=
'\n# customisation of the process.\n' 855 allFcn.extend(custMap[opt])
857 if allFcn.count(fcn)!=1:
858 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
862 packageName = f.replace(
".py",
"").
replace(
"/",
".")
863 __import__(packageName)
864 package = sys.modules[packageName]
867 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
869 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 870 if self._options.inline_custom:
871 for line
in file(customiseFile,
'r'): 872 if "import FWCore.ParameterSet.Config" in line:
874 final_snippet += line
876 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
877 for fcn
in custMap[f]:
878 print(
"customising the process with",fcn,
"from",f)
879 if not hasattr(package,fcn):
881 raise Exception(
"config "+f+
" has no function "+fcn)
885 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
886 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
889 final_snippet +=
'\n# End of customisation functions\n' 895 final_snippet=
'\n# Customisation from command line\n' 896 if self._options.customise_commands:
898 for com
in self._options.customise_commands.split(
'\\n'):
899 com=string.lstrip(com)
901 final_snippet +=
'\n'+com
912 if self._options.particleTable
not in defaultOptions.particleTableList:
913 print(
'Invalid particle table provided. Options are:')
914 print(defaultOptions.particleTable)
918 self.
loadAndRemember(
'SimGeneral.HepPDTESSource.'+self._options.particleTable+
'_cfi')
931 if self._options.isRepacked: self.
RAW2DIGIDefaultCFF=
"Configuration/StandardSequences/RawToDigi_DataMapper_cff" 951 if "DATAMIX" in self.stepMap.keys():
955 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 973 if self._options.fast
or (
'RAW2DIGI' in self.
stepMap and 'RECO' in self.
stepMap):
991 if not self._options.beamspot:
992 self._options.beamspot=VtxSmearedDefaultKey
995 if self._options.isMC==
True:
997 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 998 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1001 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1004 self._options.beamspot =
None 1010 if self._options.scenario==
'cosmics':
1011 self._options.pileup=
'Cosmics' 1012 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1013 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1014 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1018 if self._options.isMC==
True:
1024 if self._options.scenario==
'HeavyIons':
1025 if not self._options.beamspot:
1026 self._options.beamspot=VtxSmearedHIDefaultKey
1031 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1033 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1036 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1038 if self._options.isMC==
True:
1048 if self._options.isData:
1049 if self._options.magField==defaultOptions.magField:
1050 print(
"magnetic field option forced to: AutoFromDBCurrent")
1051 self._options.magField=
'AutoFromDBCurrent' 1052 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace(
'.',
'')+
'_cff' 1053 self.
magFieldCFF = self.magFieldCFF.replace(
"__",
'_')
1059 if self._options.fast:
1060 if 'start' in self._options.conditions.lower():
1061 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1063 self.
GeometryCFF=
# inGeometryKeys(opt): translate a short geometry key into its full geometry
# specification via the GeometryConf lookup table.
# NOTE(review): the else-branch (original lines 1069+) is elided in this
# extract; presumably it returns opt unchanged when the key is not in
# GeometryConf -- TODO confirm against the full file. The leading string on the
# first line belongs to the preceding (elided) GeometryCFF assignment.
'FastSimulation/Configuration/Geometries_MC_cff' 1065 def inGeometryKeys(opt):
1066 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1067 if opt
in GeometryConf:
1068 return GeometryConf[opt]
1072 geoms=self._options.geometry.split(
',')
1073 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1076 if '/' in geoms[1]
or '_cff' in geoms[1]:
1079 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1081 if (geoms[0].startswith(
'DB:')):
1086 if '/' in geoms[0]
or '_cff' in geoms[0]:
1089 simGeometry=geoms[0]
1090 if self._options.gflash==
True:
1091 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1093 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1096 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1097 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1099 if self._options.scenario==
'nocoll' or self._options.scenario==
'cosmics':
1100 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1101 self._options.beamspot=
'NoSmear' 1104 if self._options.fast:
1105 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1106 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1112 if self._options.pileup==
'default':
1113 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1114 self._options.pileup=MixingDefaultKey
1118 if self._options.isData:
1119 self._options.pileup=
None 1127 output = cms.OutputModule(
"PoolOutputModule")
1128 if stream.selectEvents.parameters_().__len__()!=0:
1129 output.SelectEvents = stream.selectEvents
1131 output.SelectEvents = cms.untracked.PSet()
1132 output.SelectEvents.SelectEvents=cms.vstring()
1133 if isinstance(stream.paths,tuple):
1134 for path
in stream.paths:
1135 output.SelectEvents.SelectEvents.append(path.label())
1137 output.SelectEvents.SelectEvents.append(stream.paths.label())
1141 if isinstance(stream.content,str):
1142 evtPset=getattr(self.process,stream.content)
1143 for p
in evtPset.parameters_():
1144 setattr(output,p,getattr(evtPset,p))
1145 if not self._options.inlineEventContent:
1146 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1148 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1150 output.outputCommands = stream.content
1153 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1155 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1156 filterName = cms.untracked.string(stream.name))
1158 if self._options.filtername:
1159 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1162 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1164 if workflow
in (
"producers,full"):
1165 if isinstance(stream.paths,tuple):
1166 for path
in stream.paths:
1167 self.schedule.append(path)
1169 self.schedule.append(stream.paths)
1173 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1174 self.additionalOutputs[name] = output
1175 setattr(self.process,name,output)
1177 if workflow ==
'output':
1179 filterList = output.SelectEvents.SelectEvents
1180 for i, filter
in enumerate(filterList):
1181 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1191 if ( len(sequence.split(
'.'))==1 ):
1193 elif ( len(sequence.split(
'.'))==2 ):
1195 sequence=sequence.split(
'.')[1]
1197 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1198 print(sequence,
"not recognized")
1205 for i,s
in enumerate(seq.split(
'*')):
1207 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1209 p=getattr(self.
process,prefix)
1211 self.schedule.append(getattr(self.
process,prefix))
1217 self.conditionalPaths.append(prefix)
1218 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1219 self.schedule.append(getattr(self.
process,prefix))
1221 for i,s
in enumerate(seq.split(
'+')):
1223 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1224 self.schedule.append(getattr(self.
process,sn))
1238 """ Enrich the process with alca streams """ 1240 sequence = sequence.split(
'.')[-1]
1243 alcaList = sequence.split(
"+")
1245 from Configuration.AlCa.autoAlca
import autoAlca
1249 for name
in alcaConfig.__dict__:
1250 alcastream = getattr(alcaConfig,name)
1251 shortName = name.replace(
'ALCARECOStream',
'')
1252 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1253 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1254 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1255 self.AlCaPaths.append(shortName)
1256 if 'DQM' in alcaList:
1257 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1258 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1260 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1263 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1264 if isinstance(alcastream.paths,tuple):
1265 for path
in alcastream.paths:
1270 for i
in range(alcaList.count(shortName)):
1271 alcaList.remove(shortName)
1274 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1275 path = getattr(alcaConfig,name)
1276 self.schedule.append(path)
1277 alcaList.remove(
'DQM')
1279 if isinstance(alcastream,cms.Path):
1281 self.blacklist_paths.append(alcastream)
1284 if len(alcaList) != 0:
1286 for name
in alcaConfig.__dict__:
1287 alcastream = getattr(alcaConfig,name)
1288 if isinstance(alcastream,cms.FilteredStream):
1289 available.append(name.replace(
'ALCARECOStream',
''))
1290 print(
"The following alcas could not be found "+
str(alcaList))
1291 print(
"available ",available)
1293 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1298 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1299 print(
"Loading lhe fragment from",loadFragment)
1300 __import__(loadFragment)
1301 self.process.load(loadFragment)
1303 self._options.inlineObjets+=
','+sequence
1305 getattr(self.process,sequence).nEvents =
int(self._options.number)
1308 self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
1309 self.excludedPaths.append(
"lhe_step")
1310 self.schedule.append( self.process.lhe_step )
1313 """ load the fragment of generator configuration """ 1318 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1320 if not '/' in loadFragment:
1321 loadFragment=
'Configuration.Generator.'+loadFragment
1323 loadFragment=loadFragment.replace(
'/',
'.')
1325 print(
"Loading generator fragment from",loadFragment)
1326 __import__(loadFragment)
1330 if not (self._options.filein
or self._options.dasquery):
1331 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1334 generatorModule=sys.modules[loadFragment]
1335 genModules=generatorModule.__dict__
1341 if self._options.hideGen:
1344 self.process.load(loadFragment)
1346 import FWCore.ParameterSet.Modules
as cmstypes
1347 for name
in genModules:
1348 theObject = getattr(generatorModule,name)
1349 if isinstance(theObject, cmstypes._Module):
1350 self._options.inlineObjets=name+
','+self._options.inlineObjets
1351 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1352 self._options.inlineObjets+=
','+name
1354 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1355 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1357 elif 'generator' in genModules:
1360 """ Enrich the schedule with the rest of the generation step """ 1362 genSeqName=sequence.split(
'.')[-1]
1366 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1367 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1370 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1372 if self._options.scenario ==
'HeavyIons':
1373 if self._options.pileup==
'HiMixGEN':
1374 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1376 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1378 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1379 self.schedule.append(self.process.generation_step)
1382 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1388 """ Enrich the schedule with the summary of the filter step """ 1395 """ Enrich the schedule with the simulation step""" 1397 if not self._options.fast:
1398 if self._options.gflash==
True:
1401 if self._options.magField==
'0T':
1404 if self._options.magField==
'0T':
1405 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1411 """ Enrich the schedule with the digitisation step""" 1414 if self._options.gflash==
True:
1415 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1417 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1418 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1420 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource':
1421 if self._options.inputEventContent==
'':
1422 self._options.inputEventContent=
'REGEN' 1424 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1431 """ Enrich the schedule with the crossing frame writer step""" 1437 """ Enrich the schedule with the digitisation step""" 1441 if self._options.pileup_input:
1443 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1444 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1445 elif self._options.pileup_input.startswith(
"filelist:"):
1446 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1448 theFiles=self._options.pileup_input.split(
',')
1450 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1465 """ Enrich the schedule with the L1 simulation step""" 1466 assert(sequence ==
None)
1472 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1473 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1474 if sequence
in supported:
1475 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1476 if self._options.scenario ==
'HeavyIons':
1480 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1485 """ Enrich the schedule with the HLT simulation step""" 1487 print(
"no specification of the hlt menu has been given, should never happen")
1488 raise Exception(
'no HLT sequence provided')
1492 from Configuration.HLT.autoHLT
import autoHLT
1495 sequence = autoHLT[key]
1497 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1503 if self._options.scenario ==
'HeavyIons':
1504 optionsForHLT[
'type'] =
'HIon' 1506 optionsForHLT[
'type'] =
'GRun' 1507 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1508 if sequence ==
'run,fromSource':
1509 if hasattr(self.process.source,
'firstRun'):
1510 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1511 elif hasattr(self.process.source,
'setRunNumber'):
1512 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1514 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1516 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1520 if self._options.isMC:
1521 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1523 if self._options.name !=
'HLT':
1524 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1525 self.additionalCommands.append(
'process = ProcessName(process)')
1526 self.additionalCommands.append(
'')
1527 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1530 self.schedule.append(self.process.HLTSchedule)
1531 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1534 if self._options.fast:
1535 if not hasattr(self.
process,
'HLTEndSequence'):
1536 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1541 seqReco=sequence.split(
',')[1]
1542 seqDigi=sequence.split(
',')[0]
1544 print(
"RAW2RECO requires two specifications",sequence,
"insufficient")
1558 self.
loadAndRemember(
"PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
1560 for filt
in allMetFilterPaths:
1561 self.schedule.append(getattr(self.
process,
'Flag_'+filt))
1564 ''' Enrich the schedule with L1 HW validation ''' 1567 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1571 ''' Enrich the schedule with L1 reconstruction ''' 1577 ''' Enrich the schedule with L1 reconstruction ''' 1583 ''' Enrich the schedule with a user defined filter sequence ''' 1585 filterConfig=self.
load(sequence.split(
'.')[0])
1586 filterSeq=sequence.split(
'.')[-1]
1588 class PrintAllModules(
object):
1592 def enter(self,visitee):
1594 label=visitee.label()
1599 def leave(self,v):
pass 1601 expander=PrintAllModules()
1603 self._options.inlineObjets+=
','+expander.inliner
1604 self._options.inlineObjets+=
','+filterSeq
1615 ''' Enrich the schedule with reconstruction ''' 1621 ''' Enrich the schedule with reconstruction ''' 1627 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1628 if not self._options.fast:
1629 print(
"ERROR: this step is only implemented for FastSim")
1632 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1636 ''' Enrich the schedule with PAT ''' 1639 self.labelsToAssociate.append(
'patTask')
1640 if not self._options.runUnscheduled:
1641 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1642 if self._options.isData:
1643 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1645 if self._options.fast:
1646 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1648 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1650 if self._options.hltProcess:
1651 if len(self._options.customise_commands) > 1:
1652 self._options.customise_commands = self._options.customise_commands +
" \n" 1653 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1654 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1655 self._options.customise_commands = self._options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1662 ''' Enrich the schedule with PATGEN ''' 1664 self.labelsToAssociate.append(
'patGENTask')
1665 if not self._options.runUnscheduled:
1666 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1667 if self._options.isData:
1668 raise Exception(
"PATGEN step can only run on MC")
1672 ''' Enrich the schedule with NANO ''' 1675 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1676 if self._options.runUnscheduled:
1677 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1679 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1680 if self._options.hltProcess:
1681 if len(self._options.customise_commands) > 1:
1682 self._options.customise_commands = self._options.customise_commands +
" \n" 1683 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1687 ''' Enrich the schedule with event interpretation ''' 1688 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1689 if sequence
in EventInterpretation:
1691 sequence =
'EIsequence' 1693 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1699 ''' Enrich the schedule with skimming fragments''' 1701 sequence = sequence.split(
'.')[-1]
1703 skimlist=sequence.split(
'+')
1705 from Configuration.Skimming.autoSkim
import autoSkim
1709 for skim
in skimConfig.__dict__:
1710 skimstream = getattr(skimConfig,skim)
1711 if isinstance(skimstream,cms.Path):
1713 self.blacklist_paths.append(skimstream)
1714 if (
not isinstance(skimstream,cms.FilteredStream)):
1716 shortname = skim.replace(
'SKIMStream',
'')
1717 if (sequence==
"all"):
1719 elif (shortname
in skimlist):
1722 if self._options.datatier==
'DQM':
1724 skimstreamDQM = cms.FilteredStream(
1725 responsible = skimstream.responsible,
1726 name = skimstream.name+
'DQM',
1727 paths = skimstream.paths,
1728 selectEvents = skimstream.selectEvents,
1729 content = self._options.datatier+
'EventContent',
1730 dataTier = cms.untracked.string(self._options.datatier)
1733 for i
in range(skimlist.count(shortname)):
1734 skimlist.remove(shortname)
1738 if (skimlist.__len__()!=0
and sequence!=
"all"):
1739 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1740 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1743 ''' Enrich the schedule with a user defined sequence ''' 1749 """ Enrich the schedule with the postreco step """ 1756 print(sequence,
"in preparing validation")
1758 from Validation.Configuration.autoValidation
import autoValidation
1760 sequence=sequence.split(
'.')[-1]
1761 if sequence.find(
',')!=-1:
1762 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1763 valSeqName=sequence.split(
',')[1].
split(
'+')
1768 prevalSeqName=sequence.split(
'+')
1769 valSeqName=sequence.split(
'+')
1775 postfix=
'_'+sequence
1776 prevalSeqName=[
'prevalidation'+postfix]
1777 valSeqName=[
'validation'+postfix]
1778 if not hasattr(self.
process,valSeqName[0]):
1780 valSeqName=[sequence]
1791 if (
'HLT' in self.
stepMap and not self._options.fast)
or self._options.hltProcess:
1792 for s
in valSeqName+prevalSeqName:
1795 for (i,s)
in enumerate(prevalSeqName):
1797 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1798 self.schedule.append(getattr(self.
process,
'prevalidation_step%s'%NFI(i)))
1800 for (i,s)
in enumerate(valSeqName):
1801 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1802 self.schedule.append(getattr(self.
process,
'validation_step%s'%NFI(i)))
1808 if not 'DIGI' in self.
stepMap and not self._options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1809 if self._options.restoreRNDSeeds==
False and not self._options.restoreRNDSeeds==
True:
1810 self._options.restoreRNDSeeds=
True 1812 if not 'DIGI' in self.
stepMap and not self._options.fast:
1816 self._options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1818 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1820 for (i,s)
in enumerate(valSeqName):
1821 getattr(self.
process,
'validation_step%s'%NFI(i))._seq = self.process.genstepfilter * getattr(self.
process,
'validation_step%s'%NFI(i))._seq
1827 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1828 It will climb down within PSets, VPSets and VInputTags to find its target""" 1829 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1836 if isinstance(pset, cms._Parameterizable):
1837 for name
in pset.parameters_().
keys():
1843 value = getattr(pset,name)
1844 type = value.pythonTypeName()
1845 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1846 self.
doIt(value,base+
"."+name)
1847 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1848 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1849 elif type
in (
'cms.string',
'cms.untracked.string'):
1853 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1854 for (i,n)
in enumerate(value):
1855 if not isinstance(n, cms.InputTag):
1862 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1863 for (i,n)
in enumerate(value):
1866 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1869 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1874 label = visitee.label()
1875 except AttributeError:
1876 label =
'<Module not in a Process>' 1878 label =
'other execption' 1879 self.
doIt(visitee, label)
1886 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1889 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1891 self.additionalCommands.append(loadMe)
1892 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
1896 if self._options.hltProcess:
1897 proc=self._options.hltProcess
1899 proc=self.process.name_()
1900 if proc==HLTprocess:
return 1902 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
1904 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
1905 self.additionalCommands.append(
'from Configuration.Applications.ConfigBuilder import ConfigBuilder')
1906 self.additionalCommands.append(
'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
1912 while '@' in repr(seqList)
and level<maxLevel:
1914 for specifiedCommand
in seqList:
1915 if specifiedCommand.startswith(
'@'):
1916 location=specifiedCommand[1:]
1917 if not location
in mapping:
1918 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
1919 mappedTo=mapping[location]
1921 mappedTo=mappedTo[index]
1922 seqList.remove(specifiedCommand)
1923 seqList.extend(mappedTo.split(
'+'))
1926 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
1932 sequenceList=sequence.split(
'.')[-1].
split(
'+')
1933 postSequenceList=sequence.split(
'.')[-1].
split(
'+')
1934 from DQMOffline.Configuration.autoDQM
import autoDQM
1938 if len(set(sequenceList))!=len(sequenceList):
1939 sequenceList=
list(set(sequenceList))
1940 print(
"Duplicate entries for DQM:, using",sequenceList)
1942 pathName=
'dqmoffline_step' 1943 for (i,sequence)
in enumerate(sequenceList):
1945 pathName=
'dqmoffline_%d_step'%(i)
1947 if 'HLT' in self.stepMap.keys()
or self._options.hltProcess:
1950 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,sequence ) ) )
1951 self.schedule.append(getattr(self.
process,pathName))
1953 if hasattr(self.
process,
"genstepfilter")
and len(self.process.genstepfilter.triggerConditions):
1955 getattr(self.
process,pathName).
insert(0,self.process.genstepfilter)
1957 pathName=
'dqmofflineOnPAT_step' 1958 for (i,sequence)
in enumerate(postSequenceList):
1960 pathName=
'dqmofflineOnPAT_%d_step'%(i)
1962 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, sequence ) ) )
1963 self.schedule.append(getattr(self.
process,pathName))
1966 """ Enrich the process with harvesting step """ 1967 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1971 sequence = sequence.split(
'.')[-1]
1974 harvestingList = sequence.split(
"+")
1975 from DQMOffline.Configuration.autoDQM
import autoDQM
1976 from Validation.Configuration.autoValidation
import autoValidation
1978 combined_mapping = copy.deepcopy( autoDQM )
1979 combined_mapping.update( autoValidation )
1980 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1982 if len(set(harvestingList))!=len(harvestingList):
1983 harvestingList=
list(set(harvestingList))
1984 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1986 for name
in harvestingList:
1987 if not name
in harvestingConfig.__dict__:
1988 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
1990 harvestingstream = getattr(harvestingConfig,name)
1991 if isinstance(harvestingstream,cms.Path):
1992 self.schedule.append(harvestingstream)
1993 self.blacklist_paths.append(harvestingstream)
1994 if isinstance(harvestingstream,cms.Sequence):
1995 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
1996 self.schedule.append(getattr(self.
process,name+
"_step"))
2002 """ Enrich the process with AlCaHarvesting step """ 2004 sequence=sequence.split(
".")[-1]
2007 harvestingList = sequence.split(
"+")
2011 from Configuration.AlCa.autoPCL
import autoPCL
2014 for name
in harvestingConfig.__dict__:
2015 harvestingstream = getattr(harvestingConfig,name)
2016 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2017 self.schedule.append(harvestingstream)
2018 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2019 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2020 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2021 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2023 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2024 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2025 harvestingList.remove(name)
2027 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2028 self.schedule.append(lastStep)
2030 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2031 print(
"The following harvesting could not be found : ", harvestingList)
2032 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2042 self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
2043 self.schedule.append(self.process.reconstruction)
2047 """ Add useful info for the production. """ 2048 self.process.configurationMetadata=cms.untracked.PSet\
2049 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2050 name=cms.untracked.string(
"Applications"),
2051 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2054 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2059 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2060 self.
pythonCfgCode +=
"# with command line options: "+self._options.arguments+
'\n' 2061 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2066 modifierImports=[
'from Configuration.StandardSequences.Eras import eras']
2068 if hasattr(self.
_options,
"era")
and self._options.era :
2070 from Configuration.StandardSequences.Eras
import eras
2071 for requestedEra
in self._options.era.split(
",") :
2072 modifierStrings.append(
"eras."+requestedEra)
2073 modifiers.append(getattr(eras,requestedEra))
2076 if hasattr(self.
_options,
"procModifiers")
and self._options.procModifiers:
2079 for pm
in self._options.procModifiers.split(
','):
2080 modifierStrings.append(pm)
2081 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2082 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2085 self.
pythonCfgCode +=
"process = cms.Process('"+self._options.name+
"'" 2088 if len(modifierStrings)>0:
2095 if len(modifiers)>0:
2096 self.
process = cms.Process(self._options.name,*modifiers)
2098 self.
process = cms.Process(self._options.name)
2104 """ Prepare the configuration string and add missing pieces.""" 2116 outputModuleCfgCode=
"" 2117 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2122 self.
pythonCfgCode +=
"# import of standard configurations\n" 2127 if not hasattr(self.
process,
"configurationMetadata"):
2131 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2146 nl=sorted(self.additionalOutputs.keys())
2149 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2150 tmpOut = cms.EndPath(output)
2151 setattr(self.
process,name+
'OutPath',tmpOut)
2152 self.schedule.append(tmpOut)
2160 for object
in self._options.inlineObjets.split(
','):
2163 if not hasattr(self.
process,object):
2164 print(
'cannot inline -'+object+
'- : not known')
2171 for path
in self.process.paths:
2175 for endpath
in self.process.endpaths:
2181 result =
"process.schedule = cms.Schedule(" 2184 self.process.schedule = cms.Schedule()
2186 if not isinstance(item, cms.Schedule):
2187 self.process.schedule.append(item)
2189 self.process.schedule.extend(item)
2191 if hasattr(self.
process,
"HLTSchedule"):
2192 beforeHLT = self.
schedule[:self.schedule.index(self.process.HLTSchedule)]
2193 afterHLT = self.
schedule[self.schedule.index(self.process.HLTSchedule)+1:]
2194 pathNames = [
'process.'+p.label_()
for p
in beforeHLT]
2195 result +=
','.
join(pathNames)+
')\n' 2196 result +=
'process.schedule.extend(process.HLTSchedule)\n' 2197 pathNames = [
'process.'+p.label_()
for p
in afterHLT]
2198 result +=
'process.schedule.extend(['+
','.
join(pathNames)+
'])\n' 2200 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2201 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2206 self.process.schedule.associate(getattr(self.
process, labelToAssociate))
2207 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2211 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2214 if self._options.nThreads
is not "1":
2217 self.
pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2218 self.
pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32(0)\n" 2220 if self._options.isRepacked:
2222 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2223 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2224 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2228 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2236 for path
in self.process.paths:
2239 getattr(self.
process,path)._seq = pfs * getattr(self.
process,path)._seq
2245 if self._options.runUnscheduled:
2248 self.
pythonCfgCode+=
"#do not add changes to your config after this point (unless you know what you are doing)\n" 2249 self.
pythonCfgCode+=
"from FWCore.ParameterSet.Utilities import convertToUnscheduled\n" 2250 self.
pythonCfgCode+=
"process=convertToUnscheduled(process)\n" 2252 from FWCore.ParameterSet.Utilities
import convertToUnscheduled
2259 if hasattr(self.
process,
"logErrorHarvester"):
2261 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2262 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2263 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2264 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2271 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2272 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2273 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2275 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2281 if self._options.io:
2283 if not self._options.io.endswith(
'.io'): self._option.io+=
'.io' 2284 io=open(self._options.io,
'w')
2286 if hasattr(self.process.source,
"fileNames"):
2287 if len(self.process.source.fileNames.value()):
2288 ioJson[
'primary']=self.process.source.fileNames.value()
2289 if hasattr(self.process.source,
"secondaryFileNames"):
2290 if len(self.process.source.secondaryFileNames.value()):
2291 ioJson[
'secondary']=self.process.source.secondaryFileNames.value()
2292 if self._options.pileup_input
and (self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:')):
2293 ioJson[
'pileup']=self._options.pileup_input[4:]
2294 for (o,om)
in self.process.outputModules_().
items():
2295 ioJson[o]=om.fileName.value()
2296 ioJson[
'GT']=self.process.GlobalTag.globaltag.value()
2300 io.write(json.dumps(ioJson))
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
S & print(S &os, JobReport::InputFile const &f)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, sequence=None)
def convertToUnscheduled(proc)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
static std::string join(char **cmd)
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional
put the filtering path in the schedule