from __future__ import print_function

__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag

# standard library modules used throughout this file
import os
import re
import sys
import copy
import six

from subprocess import Popen, PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
# container for the default values of all cmsDriver options
class Options:
    pass

defaultOptions = Options()
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '1'


def dumpPython(process, name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"
"\n" 102 import FWCore.ParameterSet.Config
as cms
105 for line
in open(fileName,
'r'): 106 if line.count(
".root")>=2:
108 entries=line.replace(
"\n",
"").
split()
109 if not entries[0]
in prim:
110 prim.append(entries[0])
111 if not entries[1]
in sec:
112 sec.append(entries[1])
113 elif (line.find(
".root")!=-1):
114 entry=line.replace(
"\n",
"")
115 if not entry
in prim:
118 if not hasattr(s,
"fileNames"):
119 s.fileNames=cms.untracked.vstring(prim)
121 s.fileNames.extend(prim)
123 if not hasattr(s,
"secondaryFileNames"):
124 s.secondaryFileNames=cms.untracked.vstring(sec)
126 s.secondaryFileNames.extend(sec)
127 print(
"found files: ",prim)
129 raise Exception(
"There are not files in input from the file list")
131 print(
"found parent files:",sec)
def filesFromDASQuery(query, option="", s=None):
    import os, time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times - I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            #two files solution: primary and parent file on the same line
            entries=line.replace("\n","").split()
            if not entries[0] in prim:
                prim.append(entries[0])
            if not entries[1] in sec:
                sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            if not entry in prim:
                prim.append(entry)
    # attach the collected files to the source, if one was given
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
def anyOf(listOfKeys, dict, opt=None):
    # return (and pop) the value of the first of listOfKeys present in dict,
    # falling back to opt when none of them is there
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
194 """The main building routines """ 196 def __init__(self, options, process = None, with_output = False, with_input = False ):
197 """options taken from old cmsDriver and optparse """ 199 options.outfile_name = options.dirout+options.fileout
203 if self._options.isData
and options.isMC:
204 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
209 if 'ENDJOB' in self._options.step:
210 if (hasattr(self.
_options,
"outputDefinition")
and \
211 self._options.outputDefinition !=
'' and \
212 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
213 (hasattr(self.
_options,
"datatier")
and \
214 self._options.datatier
and \
215 'DQMIO' in self._options.datatier):
216 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
217 self._options.step=self._options.step.replace(
',ENDJOB',
'')
222 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
225 for step
in self._options.step.split(
","):
226 if step==
'':
continue 227 stepParts = step.split(
":")
228 stepName = stepParts[0]
229 if stepName
not in stepList
and not stepName.startswith(
're'):
230 raise ValueError(
"Step "+stepName+
" unknown")
231 if len(stepParts)==1:
233 elif len(stepParts)==2:
235 elif len(stepParts)==3:
236 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
238 raise ValueError(
"Step definition "+step+
" invalid")
239 self.stepKeys.append(stepName)
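        # Illustration (assumed cmsDriver syntax): a --step option such as
        #   "GEN,SIM,DIGI:pdigi_valid,RECO:reconstruction"
        # is parsed into stepMap entries of the form
        #   stepMap['GEN']  = ""                (use the default sequence)
        #   stepMap['DIGI'] = ['pdigi_valid']   (explicit '+'-separated sequences)
        # and a three-part spec "STEP:cff:seqA+seqB" becomes (['seqA','seqB'],'cff').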
        self.with_output = with_output
        self.with_input = with_input
        self.process = process
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False

        # bookkeeping containers filled while the configuration is being built
        self.imports = []
        self.additionalCommands = []
        self.additionalOutputs = {}
        self.addedObjects = []
        self.blacklist_paths = []
        self.schedule = []
        self.conditionalPaths = []
        self.excludedPaths = []
        self.AlCaPaths = []
        self.labelsToAssociate = []
        self.nextScheduleIsConditional = False

    def profileOptions(self):
        """
        Function to add the igprof profile service so that you can dump in the middle
        of the run.
        """
        profileOpts = self._options.profile.split(':')
        profilerStart = 1
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        if profileOpts:
            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)
        if profileOpts:
            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)
        if profileOpts:
            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%%I.gz" % (
                self._options.evt_type.replace("_cfi", ""),
                str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
                str(self._options.datatier) + str(self._options.profileTypeLabel))
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
    def load(self, includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        # the include file is remembered so that it can be reproduced in the dumped cfg
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]
329 """helper routine to remember replace statements""" 330 self.additionalCommands.append(command)
331 if not command.strip().startswith(
"#"):
334 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
338 if 'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys():
339 self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring(
'ProductNotFound'),fileMode = cms.untracked.string(
'FULLMERGE'))
341 self.process.options = cms.untracked.PSet( )
343 self.addedObjects.append((
"",
"options"))
345 if self._options.lazy_download:
346 self.process.AdaptorConfig = cms.Service(
"AdaptorConfig",
347 stats = cms.untracked.bool(
True),
348 enable = cms.untracked.bool(
True),
349 cacheHint = cms.untracked.string(
"lazy-download"),
350 readHint = cms.untracked.string(
"read-ahead-buffered")
352 self.addedObjects.append((
"Setup lazy download",
"AdaptorConfig"))
        if self._options.profile:
            (start, interval, eventFormat, jobFormat) = self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent         = cms.untracked.int32(start),
                                                     reportEventInterval      = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent  = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))
367 """Here we decide how many evts will be processed""" 368 self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(
int(self._options.number)))
369 if self._options.number_out:
370 self.process.maxEvents.output = cms.untracked.int32(
int(self._options.number_out))
371 self.addedObjects.append((
"",
"maxEvents"))
374 """Here the source is built. Priority: file, generator""" 375 self.addedObjects.append((
"Input source",
"source"))
377 def filesFromOption(self):
378 for entry
in self._options.filein.split(
','):
380 if entry.startswith(
"filelist:"):
382 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
383 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
385 self.process.source.fileNames.append(self._options.dirin+entry)
386 if self._options.secondfilein:
387 if not hasattr(self.process.source,
"secondaryFileNames"):
388 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
389 for entry
in self._options.secondfilein.split(
','):
391 if entry.startswith(
"filelist:"):
392 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
393 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
394 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
396 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
398 if self._options.filein
or self._options.dasquery:
399 if self._options.filetype ==
"EDM":
400 self.process.source=cms.Source(
"PoolSource",
401 fileNames = cms.untracked.vstring(),
402 secondaryFileNames= cms.untracked.vstring())
403 filesFromOption(self)
404 elif self._options.filetype ==
"DAT":
405 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
406 filesFromOption(self)
            elif self._options.filetype == "LHE":
                self.process.source=cms.Source("LHESource", fileNames = cms.untracked.vstring())
                if self._options.filein.startswith("lhe:"):
                    #list the article directory automatically
                    args=self._options.filein.split(':')
                    article=args[1]
                    print('LHE input from article ',article)
                    location='/store/lhe/'
                    textOfFiles=os.popen('cmsLHEtoEOSManager.py -l '+article)
                    for line in textOfFiles:
                        for fileName in [x for x in line.split() if '.lhe' in x]:
                            self.process.source.fileNames.append(location+article+'/'+fileName)
                    # warn when the article listing did not yield usable files
                    if len(self.process.source.fileNames)==0:
                        print('Issue to load LHE files, please check and try again.')
                        print('Issue with empty filename, but can pass line check')
                    if len(args)>2:
                        self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
                else:
                    filesFromOption(self)
            elif self._options.filetype == "DQM":
                self.process.source=cms.Source("DQMRootSource",
                                               fileNames = cms.untracked.vstring())
                filesFromOption(self)
            elif self._options.filetype == "DQMDAQ":
                self.process.source=cms.Source("DQMStreamerReader")

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        if self._options.dasquery!='':
            self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
            filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)

            if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
                self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

        ##drop LHEXMLStringProduct on input to save memory if appropriate
        if 'GEN' in self.stepMap.keys():
            if self._options.inputCommands:
                self._options.inputCommands+=',drop LHEXMLStringProduct_*_*_*,'
            else:
                self._options.inputCommands='keep *, drop LHEXMLStringProduct_*_*_*,'

        if self.process.source and self._options.inputCommands:
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for command in self._options.inputCommands.split(','):
                # remove whitespace around the (possibly empty) keep/drop statements
                command = command.strip()
                if command=='': continue
                self.process.source.inputCommands.append(command)
            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

        if self._options.lumiToProcess:
            import FWCore.PythonUtilities.LumiList as LumiList
            self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange( LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(',') )

        if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
            if self.process.source is None:
                self.process.source=cms.Source("EmptySource")

        # modify the source in case of run-dependent MC
        self.runsAndWeights=None
        if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC :
            if not self._options.isMC :
                raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
            if self._options.runsAndWeightsForMC:
                self.runsAndWeights = eval(self._options.runsAndWeightsForMC)
            else:
                from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
                if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
                    __import__(RunsAndWeights[self._options.runsScenarioForMC])
                    self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
                else:
                    self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

        if self.runsAndWeights:
            import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
            ThrowAndSetRandomRun.throwAndSetRandomRun(self.process.source,self.runsAndWeights)
            self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
            self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.runsAndWeights))
502 """ Add output module to the process """ 504 if self._options.outputDefinition:
505 if self._options.datatier:
506 print(
"--datatier & --eventcontent options ignored")
509 outList = eval(self._options.outputDefinition)
510 for (id,outDefDict)
in enumerate(outList):
511 outDefDictStr=outDefDict.__str__()
512 if not isinstance(outDefDict,dict):
513 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
515 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
518 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
519 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
520 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
521 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
522 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
524 if not theModuleLabel:
525 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
526 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
527 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 529 for name
in tryNames:
530 if not hasattr(self.
process,name):
533 if not theModuleLabel:
534 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
536 defaultFileName=self._options.outfile_name
538 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
540 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
541 if not theFileName.endswith(
'.root'):
545 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
546 if theStreamType==
'DQMIO': theStreamType=
'DQM' 547 if theStreamType==
'ALL':
548 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
550 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
554 if theStreamType==
'ALCARECO' and not theFilterName:
555 theFilterName=
'StreamALCACombined' 558 CppType=
'PoolOutputModule' 559 if self._options.timeoutOutput:
560 CppType=
'TimeoutPoolOutputModule' 561 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 562 output = cms.OutputModule(CppType,
563 theEventContent.clone(),
564 fileName = cms.untracked.string(theFileName),
565 dataset = cms.untracked.PSet(
566 dataTier = cms.untracked.string(theTier),
567 filterName = cms.untracked.string(theFilterName))
569 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
570 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
571 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
572 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
574 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
577 if not hasattr(output,
'SelectEvents'):
578 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
580 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
583 if hasattr(self.
process,theModuleLabel):
584 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
586 setattr(self.
process,theModuleLabel,output)
587 outputModule=getattr(self.
process,theModuleLabel)
588 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
589 path=getattr(self.
process,theModuleLabel+
'_step')
590 self.schedule.append(path)
592 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
593 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 595 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
596 if theExtraOutputCommands:
597 if not isinstance(theExtraOutputCommands,list):
598 raise Exception(
"extra ouput command in --option must be a list of strings")
599 if hasattr(self.
process,theStreamType+
"EventContent"):
600 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
602 outputModule.outputCommands.extend(theExtraOutputCommands)
604 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        # if the only step is alca we don't need to put an output in
        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':
            return "\n"

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            ## override streamType to eventContent in case of NANOEDM
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")
            if i==0:
                theFileName=self._options.outfile_name
                theFilterName=self._options.filtername
            else:
                theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
                theFilterName=self._options.filtername
            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      theEventContent.clone(),
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName))
                                      )
            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput
                miniAOD_customizeOutput(output)

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                    return label
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()

        return result
    def addStandardSequences(self):
        """
        Add selected standard sequences to the process
        """
        # load the pile up file
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            # make sure the requested pile-up scenario exists
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # put the mixing parameters in a dictionary
            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # load the pile-up cfg file corresponding to the requested scenario
            if 'file:' in pileupSpec:
                # the file is local to the job
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present the pileup_input is used by the DATAMIX step instead
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())

        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    self.loadAndRemember(self.SimGeometryCFF)
        except ImportError:
            print("Geometry option",self._options.geometry,"unknown.")
            raise
        # walk over the requested steps and call the corresponding prepare_ method
        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print("Step:", stepName,"Spec:",stepSpec)
            if stepName.startswith('re'):
                ##add the corresponding input content
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            #it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        # add the requested input event content to the source
        if self._options.inputEventContent:
            def dropSecondDropStar(iec):
                # drop every 'drop *' statement after the first one: it would be redundant
                firstSeen=False
                for item in list(iec):
                    if item=='drop *':
                        if firstSeen:
                            iec.remove(item)
                        firstSeen=True

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
810 """Add conditions to the process""" 811 if not self._options.conditions:
return 813 if 'FrontierConditions_GlobalTag' in self._options.conditions:
814 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
815 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
819 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
820 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
821 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
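    # Illustration (assumed cmsDriver usage): --conditions is passed straight to
    # Configuration.AlCa.GlobalTag.GlobalTag, e.g.
    #   --conditions auto:run2_data
    # The old "FrontierConditions_GlobalTag,<tag>" form is only tolerated for
    # backward compatibility and is stripped above before the GlobalTag call.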
825 """Include the customise code """ 829 for c
in self._options.customisation_file:
830 custOpt.extend(c.split(
","))
832 for c
in self._options.customisation_file_unsch:
833 custOpt.extend(c.split(
","))
839 raise Exception(
"more than . in the specification:"+opt)
840 fileName=opt.split(
'.')[0]
841 if opt.count(
'.')==0: rest=
'customise' 843 rest=opt.split(
'.')[1]
844 if rest==
'py': rest=
'customise' 846 if fileName
in custMap:
847 custMap[fileName].extend(rest.split(
'+'))
849 custMap[fileName]=rest.split(
'+')
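        # Illustration (assumed --customise syntax): an option such as
        #   --customise SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1
        # yields custMap = {'SLHCUpgradeSimulations/Configuration/postLS1Customs': ['customisePostLS1']};
        # several functions from the same file can be chained with '+', and a bare
        # file name (or "file.py") defaults to a function called "customise".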
        final_snippet='\n# customisation of the process.\n'

        allFcn=[]
        for opt in custMap:
            allFcn.extend(custMap[opt])
        for fcn in allFcn:
            if allFcn.count(fcn)!=1:
                raise Exception("cannot specify twice "+fcn+" as a customisation method")

        for f in custMap:
            # let python search for that package and do syntax checking at the same time
            packageName = f.replace(".py","").replace("/",".")
            __import__(packageName)
            package = sys.modules[packageName]

            # ask for the .py file itself, not the compiled version
            customiseFile = re.sub(r'\.pyc$', '.py', package.__file__)

            final_snippet+='\n# Automatic addition of the customisation function from '+packageName+'\n'
            if self._options.inline_custom:
                for line in file(customiseFile,'r'):
                    if "import FWCore.ParameterSet.Config" in line:
                        continue
                    final_snippet += line
            else:
                final_snippet += 'from %s import %s \n'%(packageName,','.join(custMap[f]))
            for fcn in custMap[f]:
                print("customising the process with",fcn,"from",f)
                if not hasattr(package,fcn):
                    #bound to fail at run time
                    raise Exception("config "+f+" has no function "+fcn)
                #execute the customisation function on the process being built
                self.process=getattr(package,fcn)(self.process)
                #and echo its call in the dumped configuration
                final_snippet += "\n#call to customisation function "+fcn+" imported from "+packageName
                final_snippet += "\nprocess = %s(process)\n"%(fcn,)

        final_snippet += '\n# End of customisation functions\n'

        return final_snippet

    def addCustomiseCmdLine(self):
        final_snippet='\n# Customisation from command line\n'
        if self._options.customise_commands:
            import string
            for com in self._options.customise_commands.split('\\n'):
                com=string.lstrip(com)
                self.executeAndRemember(com)
                final_snippet +='\n'+com

        return final_snippet
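        # Illustration (assumed usage): --customise_commands takes literal python
        # appended at the end of the configuration, with '\n' separating lines, e.g.
        #   --customise_commands "process.source.skipEvents=cms.untracked.uint32(10)\nprocess.maxEvents.input=cms.untracked.int32(100)"
        # Each command is executed on the process being built and echoed into the
        # dumped cfg via executeAndRemember().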
    def define_Configs(self):
        if self._options.particleTable not in defaultOptions.particleTableList:
            print('Invalid particle table provided. Options are:')
            print(defaultOptions.particleTable)
            sys.exit(-1)
        else:
            self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"

        if "DATAMIX" in self.stepMap.keys():
            ...
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):
            ...

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # if it is MC then switch to the MC specific standard sequences
        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        # scenario-dependent includes
        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            if self._options.isMC==True:
                ...

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            if self._options.isMC==True:
                ...

        # the magnetic field
        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print("magnetic field option forced to: AutoFromDBCurrent")
            self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')
        # the geometry
        simGeometry=''
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                #may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                ...
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # synchronize the full simulation sequence with the chosen geometry
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires its own configuration
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

        # the default pile-up scenario
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        # not driven by a default: data gets no pile-up mixing
        if self._options.isData:
            self._options.pileup=None
"PoolOutputModule")
1132 if stream.selectEvents.parameters_().__len__()!=0:
1133 output.SelectEvents = stream.selectEvents
1135 output.SelectEvents = cms.untracked.PSet()
1136 output.SelectEvents.SelectEvents=cms.vstring()
1137 if isinstance(stream.paths,tuple):
1138 for path
in stream.paths:
1139 output.SelectEvents.SelectEvents.append(path.label())
1141 output.SelectEvents.SelectEvents.append(stream.paths.label())
1145 if isinstance(stream.content,str):
1146 evtPset=getattr(self.process,stream.content)
1147 for p
in evtPset.parameters_():
1148 setattr(output,p,getattr(evtPset,p))
1149 if not self._options.inlineEventContent:
1150 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1152 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1154 output.outputCommands = stream.content
1157 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1159 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1160 filterName = cms.untracked.string(stream.name))
1162 if self._options.filtername:
1163 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1166 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1168 if workflow
in (
"producers,full"):
1169 if isinstance(stream.paths,tuple):
1170 for path
in stream.paths:
1171 self.schedule.append(path)
1173 self.schedule.append(stream.paths)
1177 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1178 self.additionalOutputs[name] = output
1179 setattr(self.process,name,output)
1181 if workflow ==
'output':
1183 filterList = output.SelectEvents.SelectEvents
1184 for i, filter
in enumerate(filterList):
1185 filterList[i] = filter+
":"+self._options.triggerResultsProcess
    def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF):
        # the sequence can be "cffName.seqA+seqB" or just "seqA+seqB" (then the default cff is used)
        if ( len(sequence.split('.'))==1 ):
            l=self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            l=self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
            print(sequence,"not recognized")
            raise
        return l

    def scheduleSequence(self, seq, prefix, what='Path'):
        if '*' in seq:
            #create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    p=getattr(self.process,prefix)
                    tmp = getattr(self.process, s)
                    if isinstance(tmp, cms.Task):
                        p.associate(tmp)
                    else:
                        p+=tmp
            self.schedule.append(getattr(self.process,prefix))
        else:
            #create as many paths as there are sequences
            if not '+' in seq:
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            else:
                for i,s in enumerate(seq.split('+')):
                    sn=prefix+'%d'%(i)
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
        return
1246 """ Enrich the process with alca streams """ 1248 sequence = sequence.split(
'.')[-1]
1251 alcaList = sequence.split(
"+")
1253 from Configuration.AlCa.autoAlca
import autoAlca
1257 for name
in alcaConfig.__dict__:
1258 alcastream = getattr(alcaConfig,name)
1259 shortName = name.replace(
'ALCARECOStream',
'')
1260 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1261 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1262 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1263 self.AlCaPaths.append(shortName)
1264 if 'DQM' in alcaList:
1265 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1266 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1268 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1271 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1272 if isinstance(alcastream.paths,tuple):
1273 for path
in alcastream.paths:
1278 for i
in range(alcaList.count(shortName)):
1279 alcaList.remove(shortName)
1282 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1283 path = getattr(alcaConfig,name)
1284 self.schedule.append(path)
1285 alcaList.remove(
'DQM')
1287 if isinstance(alcastream,cms.Path):
1289 self.blacklist_paths.append(alcastream)
1292 if len(alcaList) != 0:
1294 for name
in alcaConfig.__dict__:
1295 alcastream = getattr(alcaConfig,name)
1296 if isinstance(alcastream,cms.FilteredStream):
1297 available.append(name.replace(
'ALCARECOStream',
''))
1298 print(
"The following alcas could not be found "+
str(alcaList))
1299 print(
"available ",available)
1301 raise Exception(
"The following alcas could not be found "+
str(alcaList))
    def prepare_LHE(self, sequence = None):
        #load the fragment and make it loadable
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print("Loading lhe fragment from",loadFragment)
        __import__(loadFragment)
        self.process.load(loadFragment)
        ##inline the modules
        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        #schedule it
        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1321 """ load the fragment of generator configuration """ 1326 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1328 if not '/' in loadFragment:
1329 loadFragment=
'Configuration.Generator.'+loadFragment
1331 loadFragment=loadFragment.replace(
'/',
'.')
1333 print(
"Loading generator fragment from",loadFragment)
1334 __import__(loadFragment)
1338 if not (self._options.filein
or self._options.dasquery):
1339 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1342 generatorModule=sys.modules[loadFragment]
1343 genModules=generatorModule.__dict__
1349 if self._options.hideGen:
1352 self.process.load(loadFragment)
1354 import FWCore.ParameterSet.Modules
as cmstypes
1355 for name
in genModules:
1356 theObject = getattr(generatorModule,name)
1357 if isinstance(theObject, cmstypes._Module):
1358 self._options.inlineObjets=name+
','+self._options.inlineObjets
1359 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1360 self._options.inlineObjets+=
','+name
1362 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1363 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1365 elif 'generator' in genModules:
1368 """ Enrich the schedule with the rest of the generation step """ 1370 genSeqName=sequence.split(
'.')[-1]
1374 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1375 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1378 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1380 if self._options.scenario ==
'HeavyIons':
1381 if self._options.pileup==
'HiMixGEN':
1382 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1384 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1386 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1387 self.schedule.append(self.process.generation_step)
1390 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1396 """ Enrich the schedule with the summary of the filter step """ 1403 """ Enrich the schedule with the simulation step""" 1405 if not self._options.fast:
1406 if self._options.gflash==
True:
1409 if self._options.magField==
'0T':
1412 if self._options.magField==
'0T':
1413 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1419 """ Enrich the schedule with the digitisation step""" 1422 if self._options.gflash==
True:
1423 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1425 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1426 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1428 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource':
1429 if self._options.inputEventContent==
'':
1430 self._options.inputEventContent=
'REGEN' 1432 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1439 """ Enrich the schedule with the crossing frame writer step""" 1445 """ Enrich the schedule with the digitisation step""" 1449 if self._options.pileup_input:
1451 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1452 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1453 elif self._options.pileup_input.startswith(
"filelist:"):
1454 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1456 theFiles=self._options.pileup_input.split(
',')
1458 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1473 """ Enrich the schedule with the L1 simulation step""" 1474 assert(sequence ==
None)
1480 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1481 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1482 if sequence
in supported:
1483 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1484 if self._options.scenario ==
'HeavyIons':
1488 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1493 """ Enrich the schedule with the HLT simulation step""" 1495 print(
"no specification of the hlt menu has been given, should never happen")
1496 raise Exception(
'no HLT sequence provided')
1500 from Configuration.HLT.autoHLT
import autoHLT
1503 sequence = autoHLT[key]
1505 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1511 if self._options.scenario ==
'HeavyIons':
1512 optionsForHLT[
'type'] =
'HIon' 1514 optionsForHLT[
'type'] =
'GRun' 1515 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1516 if sequence ==
'run,fromSource':
1517 if hasattr(self.process.source,
'firstRun'):
1518 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1519 elif hasattr(self.process.source,
'setRunNumber'):
1520 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1522 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1524 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1528 if self._options.isMC:
1529 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1531 if self._options.name !=
'HLT':
1532 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1533 self.additionalCommands.append(
'process = ProcessName(process)')
1534 self.additionalCommands.append(
'')
1535 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1538 self.schedule.append(self.process.HLTSchedule)
1539 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1542 if self._options.fast:
1543 if not hasattr(self.
process,
'HLTEndSequence'):
1544 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
    def prepare_RAW2RECO(self, sequence = None):
        if ',' in sequence:
            seqReco=sequence.split(',')[1]
            seqDigi=sequence.split(',')[0]
        else:
            print("RAW2RECO requires two specifications",sequence,"insufficient")
        ...

    def prepare_PATFILTER(self, sequence = None):
        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
    def prepare_L1HwVal(self, sequence = 'L1HwVal'):
        ''' Enrich the schedule with L1 HW validation '''
        print('\n\n\n DEPRECATED this has no action \n\n\n')
        return

    def prepare_L1Reco(self, sequence = "L1Reco"):
        ''' Enrich the schedule with L1 reconstruction '''
        ...

    def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
        ''' Enrich the schedule with L1 reconstruction '''
        ...

    def prepare_FILTER(self, sequence = None):
        ''' Enrich the schedule with a user defined filter sequence '''
        ## load the relevant part
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]
        ## print it in the configuration
        class PrintAllModules(object):
            def __init__(self):
                self.inliner=''
            def enter(self,visitee):
                try:
                    label=visitee.label()
                    # keep the modules of the filter sequence inlined in the cfg
                    self.inliner=label+','+self.inliner
                except:
                    pass
            def leave(self,v): pass

        expander=PrintAllModules()
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq
        ...

    def prepare_RECO(self, sequence = "reconstruction"):
        ''' Enrich the schedule with reconstruction '''
        ...

    def prepare_RECOSIM(self, sequence = "recosim"):
        ''' Enrich the schedule with reconstruction '''
        ...

    def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
            sys.exit()
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
        return

    def prepare_PAT(self, sequence = "miniAOD"):
        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
            self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
        ...

    def prepare_PATGEN(self, sequence = "miniGEN"):
        ''' Enrich the schedule with PATGEN '''
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")

    def prepare_NANO(self, sequence = "nanoAOD"):
        ''' Enrich the schedule with NANO '''
        ...
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        else:
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

    def prepare_EI(self, sequence = None):
        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
        else:
            raise Exception('Cannot set %s event interpretation'%( sequence) )
'.')[-1]
1711 skimlist=sequence.split(
'+')
1713 from Configuration.Skimming.autoSkim
import autoSkim
1717 for skim
in skimConfig.__dict__:
1718 skimstream = getattr(skimConfig,skim)
1719 if isinstance(skimstream,cms.Path):
1721 self.blacklist_paths.append(skimstream)
1722 if (
not isinstance(skimstream,cms.FilteredStream)):
1724 shortname = skim.replace(
'SKIMStream',
'')
1725 if (sequence==
"all"):
1727 elif (shortname
in skimlist):
1730 if self._options.datatier==
'DQM':
1732 skimstreamDQM = cms.FilteredStream(
1733 responsible = skimstream.responsible,
1734 name = skimstream.name+
'DQM',
1735 paths = skimstream.paths,
1736 selectEvents = skimstream.selectEvents,
1737 content = self._options.datatier+
'EventContent',
1738 dataTier = cms.untracked.string(self._options.datatier)
1741 for i
in range(skimlist.count(shortname)):
1742 skimlist.remove(shortname)
1746 if (skimlist.__len__()!=0
and sequence!=
"all"):
1747 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1748 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
    def prepare_USER(self, sequence = None):
        ''' Enrich the schedule with a user defined sequence '''
        ...

    def prepare_POSTRECO(self, sequence = None):
        """ Enrich the schedule with the postreco step """
        ...

    def prepare_VALIDATION(self, sequence = 'validation'):
        print(sequence,"in preparing validation")
        from Validation.Configuration.autoValidation import autoValidation
        #in case VALIDATION:something:somethingelse -> something,somethingelse
        sequence=sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName=sequence.split(',')[0].split('+')
            valSeqName=sequence.split(',')[1].split('+')
            self.expandMapping(prevalSeqName,autoValidation,index=0)
            self.expandMapping(valSeqName,autoValidation,index=1)
        elif '@' in sequence:
            prevalSeqName=sequence.split('+')
            valSeqName=sequence.split('+')
            self.expandMapping(prevalSeqName,autoValidation,index=0)
            self.expandMapping(valSeqName,autoValidation,index=1)
        else:
            postfix='_'+sequence
            prevalSeqName=['prevalidation'+postfix]
            valSeqName=['validation'+postfix]
            if not hasattr(self.process,valSeqName[0]):
                prevalSeqName=['']
                valSeqName=[sequence]

        #rename the HLT process in the validation sequences when needed
        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                self.renameHLTprocessInSequence(s)

        NFI = lambda i: '' if not i else '_%s'%i
        for (i,s) in enumerate(prevalSeqName):
            if s:
                setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
                self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds=True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            #will get in the schedule, smoothly
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1835 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1836 It will climb down within PSets, VPSets and VInputTags to find its target""" 1837 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1844 if isinstance(pset, cms._Parameterizable):
1845 for name
in pset.parameters_().
keys():
1851 value = getattr(pset,name)
1852 type = value.pythonTypeName()
1853 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1854 self.
doIt(value,base+
"."+name)
1855 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1856 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1857 elif type
in (
'cms.string',
'cms.untracked.string'):
1861 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1862 for (i,n)
in enumerate(value):
1863 if not isinstance(n, cms.InputTag):
1870 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1871 for (i,n)
in enumerate(value):
1874 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1877 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1882 label = visitee.label()
1883 except AttributeError:
1884 label =
'<Module not in a Process>' 1886 label =
'other execption' 1887 self.
doIt(visitee, label)
1894 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1897 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 1899 self.additionalCommands.append(loadMe)
1900 self.additionalCommands.append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
    def renameHLTprocessInSequence(self, sequence, HLTprocess='HLT'):
        if self._options.hltProcess:
            proc=self._options.hltProcess
        else:
            proc=self.process.name_()
        if proc==HLTprocess: return
        # look up all modules in the sequence and replace the process name in their InputTags
        print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
        getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self, seqList, mapping, index=None):
        maxLevel=30
        level=0
        while '@' in repr(seqList) and level<maxLevel:
            level+=1
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location=specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo=mapping[location]
                    if index is not None:
                        mappedTo=mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break
        if level==maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
    def prepare_DQM(self, sequence = 'DQMOffline'):
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM
        self.expandMapping(sequenceList,autoDQM,index=0)
        self.expandMapping(postSequenceList,autoDQM,index=1)

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print("Duplicate entries for DQM, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            if (i!=0):
                pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
                self.renameHLTprocessInSequence(sequence)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                #will get in the schedule, smoothly
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            if (i!=0):
                pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
1976 """ Enrich the process with harvesting step """ 1977 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1981 sequence = sequence.split(
'.')[-1]
1984 harvestingList = sequence.split(
"+")
1985 from DQMOffline.Configuration.autoDQM
import autoDQM
1986 from Validation.Configuration.autoValidation
import autoValidation
1988 combined_mapping = copy.deepcopy( autoDQM )
1989 combined_mapping.update( autoValidation )
1990 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1992 if len(set(harvestingList))!=len(harvestingList):
1993 harvestingList=
list(set(harvestingList))
1994 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1996 for name
in harvestingList:
1997 if not name
in harvestingConfig.__dict__:
1998 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2000 harvestingstream = getattr(harvestingConfig,name)
2001 if isinstance(harvestingstream,cms.Path):
2002 self.schedule.append(harvestingstream)
2003 self.blacklist_paths.append(harvestingstream)
2004 if isinstance(harvestingstream,cms.Sequence):
2005 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2006 self.schedule.append(getattr(self.
process,name+
"_step"))
2012 """ Enrich the process with AlCaHarvesting step """ 2014 sequence=sequence.split(
".")[-1]
2017 harvestingList = sequence.split(
"+")
2021 from Configuration.AlCa.autoPCL
import autoPCL
2024 for name
in harvestingConfig.__dict__:
2025 harvestingstream = getattr(harvestingConfig,name)
2026 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2027 self.schedule.append(harvestingstream)
2028 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2029 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2030 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2031 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2033 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2034 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2035 harvestingList.remove(name)
2037 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2038 self.schedule.append(lastStep)
2040 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2041 print(
"The following harvesting could not be found : ", harvestingList)
2042 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)
2057 """ Add useful info for the production. """ 2058 self.process.configurationMetadata=cms.untracked.PSet\
2059 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2060 name=cms.untracked.string(
"Applications"),
2061 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2064 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
        # build the configuration header and create the cms.Process
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"

        # collect the era and process modifiers requested on the command line
        modifiers = []
        modifierStrings = []
        modifierImports = []

        if hasattr(self._options, "era") and self._options.era:
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(","):
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras, requestedEra))

        if hasattr(self._options, "procModifiers") and self._options.procModifiers:
            import importlib
            for pm in self._options.procModifiers.split(','):
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'), pm))

        self.pythonCfgCode += '\n'.join(modifierImports)+'\n\n'
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'"  # line is finished below
        if len(modifierStrings) > 0:
            self.pythonCfgCode += ',' + ','.join(modifierStrings)
        self.pythonCfgCode += ')\n\n'

        if len(modifiers) > 0:
            self.process = cms.Process(self._options.name, *modifiers)
        else:
            self.process = cms.Process(self._options.name)
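# Illustrative aside (not part of ConfigBuilder): the collected Modifier objects are
# simply passed to the cms.Process constructor.  Run2_2018 is one example era; the
# per-era cff import is the usual convention and assumes a CMSSW release area.
import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Run2_2018_cff import Run2_2018

modifiers = [Run2_2018]
process = cms.Process('RECO', *modifiers)   # same form as cms.Process(self._options.name, *modifiers)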
2115 """ Prepare the configuration string and add missing pieces.""" 2127 outputModuleCfgCode=
"" 2128 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2133 self.
pythonCfgCode +=
"# import of standard configurations\n" 2138 if not hasattr(self.
process,
"configurationMetadata"):
2142 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2157 nl=sorted(self.additionalOutputs.keys())
2160 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2161 tmpOut = cms.EndPath(output)
2162 setattr(self.
process,name+
'OutPath',tmpOut)
2163 self.schedule.append(tmpOut)
        # inline any objects explicitly requested on the command line
        for object in self._options.inlineObjets.split(','):
            if not hasattr(self.process, object):
                print('cannot inline -'+object+'- : not known')
        # dump all paths and endpaths that are not blacklisted
        for path in self.process.paths:
            if getattr(self.process, path) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process, path)

        for endpath in self.process.endpaths:
            if getattr(self.process, endpath) not in self.blacklist_paths:
                self.pythonCfgCode += dumpPython(self.process, endpath)
        # assemble process.schedule, both in memory and as python code
        result = "process.schedule = cms.Schedule("

        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process, "HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
            pathNames = ['process.'+p.label_() for p in beforeHLT]
            result += ','.join(pathNames)+')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.'+p.label_() for p in afterHLT]
            result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
        else:
            pathNames = ['process.'+p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
        self.pythonCfgCode += result

        # tasks that have to be associated to the schedule (e.g. the PAT algos tools task)
        for labelToAssociate in self.labelsToAssociate:
            self.process.schedule.associate(getattr(self.process, labelToAssociate))
            self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'

        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
is not "1":
2228 self.
pythonCfgCode +=
"process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+
")\n" 2229 self.
pythonCfgCode +=
"process.options.numberOfStreams=cms.untracked.uint32("+self._options.nStreams+
")\n" 2230 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32("+self._options.nConcurrentLumis+
")\n" 2231 self.process.options.numberOfThreads=cms.untracked.uint32(
int(self._options.nThreads))
2232 self.process.options.numberOfStreams=cms.untracked.uint32(
int(self._options.nStreams))
2233 self.process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(
int(self._options.nConcurrentLumis))
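# Illustrative aside (not part of ConfigBuilder): the thread settings are kept in two
# places, as text appended to the dumped configuration and as values set on the live
# process, so the dump and the in-memory process stay in sync.  Pure string handling.
nThreads, nStreams, nConcurrentLumis = '4', '0', '1'
pythonCfgCode  = "process.options.numberOfThreads=cms.untracked.uint32(" + nThreads + ")\n"
pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(" + nStreams + ")\n"
pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(" + nConcurrentLumis + ")\n"
print(pythonCfgCode)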
        if self._options.isRepacked:
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
            MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
        # special treatment for the production filter sequence: it is prepended to every path,
        # both in the dumped configuration and in the in-memory process
        self.pythonCfgCode += '# filter all path with the production filter sequence\n'
        pfs = getattr(self.process, self.productionFilterSequence)
        for path in self.process.paths:
            getattr(self.process, path).insert(0, pfs)
        if self._options.runUnscheduled:
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            self.process = convertToUnscheduled(self.process)
        if hasattr(self.process, "logErrorHarvester"):
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            self.process = customiseLogErrorHarvesterUsingOutputCommands(self.process)
        # Add early deletion of temporary data products to reduce peak memory need
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        self.process = customiseEarlyDelete(self.process)
        # optionally write a small JSON summary of the job I/O
        if self._options.io:
            if not self._options.io.endswith('.io'):
                self._options.io += '.io'
            io = open(self._options.io, 'w')
            ioJson = {}
            if hasattr(self.process.source, "fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source, "secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o, om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            io.write(json.dumps(ioJson))
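# Illustrative aside (not part of ConfigBuilder): the shape of the .io summary written
# above.  All file names, dataset names and the GlobalTag value are invented examples.
import json

ioJson = {
    'primary': ['/store/data/ExampleRun/ExamplePD/RAW/v1/000/000/001/example.root'],
    'pileup': '/ExampleMinBias/ExampleCampaign/PREMIX',
    'RECOoutput': 'output.root',
    'GT': 'ExampleGlobalTag_v1',
}
print(json.dumps(ioJson, indent=2))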