from __future__ import print_function
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen, PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey, VtxSmearedHIDefaultKey
defaultOptions.beamspot = None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '1'
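# --- Illustrative sketch (not part of the original module) ---------------------
# A minimal example of how a caller might start from the defaults above and
# override a few fields before handing the options object to ConfigBuilder.
# The specific values chosen here are hypothetical; cmsDriver.py normally fills
# the same attributes from its command line options.
def _example_override_defaults():
    import copy
    opts = copy.deepcopy(defaultOptions)
    opts.isMC = True                 # simulate MC instead of data
    opts.isData = False
    opts.number = 100                # process 100 events
    opts.step = 'GEN,SIM'            # comma-separated step list, parsed in __init__ below
    opts.conditions = 'auto:run2_mc' # hypothetical GlobalTag shortcut
    return opts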
theObject = getattr(process, name)
if isinstance(theObject, cms.Path) or isinstance(theObject, cms.EndPath) or isinstance(theObject, cms.Sequence):
    return "process." + name + " = " + theObject.dumpPython("process")
elif isinstance(theObject, _Module) or isinstance(theObject, cms.ESProducer):
    return "process." + name + " = " + theObject.dumpPython() + "\n"
return "process." + name + " = " + theObject.dumpPython() + "\n"
"\n" 102 import FWCore.ParameterSet.Config
as cms
105 for line
in open(fileName,
'r'): 106 if line.count(
".root")>=2:
108 entries=line.replace(
"\n",
"").
split()
109 prim.append(entries[0])
110 sec.append(entries[1])
111 elif (line.find(
".root")!=-1):
112 entry=line.replace(
"\n",
"")
115 prim = sorted(
list(set(prim)))
116 sec = sorted(
list(set(sec)))
118 if not hasattr(s,
"fileNames"):
119 s.fileNames=cms.untracked.vstring(prim)
121 s.fileNames.extend(prim)
123 if not hasattr(s,
"secondaryFileNames"):
124 s.secondaryFileNames=cms.untracked.vstring(sec)
126 s.secondaryFileNames.extend(sec)
127 print(
"found files: ",prim)
129 raise Exception(
"There are not files in input from the file list")
131 print(
"found parent files:",sec)
import FWCore.ParameterSet.Config as cms

print("the query is", query)

while eC != 0 and count < 3:
    print('Sleeping, then retrying DAS')
    p = Popen('dasgoclient %s --query "%s"' % (option, query), stdout=PIPE, shell=True)
    tupleP = os.waitpid(p.pid, 0)

if eC == 0:
    print("DAS succeeded after", count, "attempts", eC)
else:
    print("DAS failed 3 times- I give up")

for line in pipe.split('\n'):
    if line.count(".root") >= 2:
        entries = line.replace("\n", "").split()
        prim.append(entries[0])
        sec.append(entries[1])
    elif (line.find(".root") != -1):
        entry = line.replace("\n", "")

prim = sorted(list(set(prim)))
sec = sorted(list(set(sec)))

if not hasattr(s, "fileNames"):
    s.fileNames = cms.untracked.vstring(prim)
else:
    s.fileNames.extend(prim)
if not hasattr(s, "secondaryFileNames"):
    s.secondaryFileNames = cms.untracked.vstring(sec)
else:
    s.secondaryFileNames.extend(sec)
print("found files: ", prim)
print("found parent files:", sec)
def anyOf(listOfKeys, dict, opt=None):

    raise Exception("any of " + ','.join(listOfKeys) + " are mandatory entries of --output options")
194 """The main building routines """ 196 def __init__(self, options, process = None, with_output = False, with_input = False ):
197 """options taken from old cmsDriver and optparse """ 199 options.outfile_name = options.dirout+options.fileout
203 if self._options.isData
and options.isMC:
204 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
209 if 'ENDJOB' in self._options.step:
210 if (hasattr(self.
_options,
"outputDefinition")
and \
211 self._options.outputDefinition !=
'' and \
212 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
213 (hasattr(self.
_options,
"datatier")
and \
214 self._options.datatier
and \
215 'DQMIO' in self._options.datatier):
216 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
217 self._options.step=self._options.step.replace(
',ENDJOB',
'')
222 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
225 for step
in self._options.step.split(
","):
226 if step==
'':
continue 227 stepParts = step.split(
":")
228 stepName = stepParts[0]
229 if stepName
not in stepList
and not stepName.startswith(
're'):
230 raise ValueError(
"Step "+stepName+
" unknown")
231 if len(stepParts)==1:
233 elif len(stepParts)==2:
235 elif len(stepParts)==3:
236 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
238 raise ValueError(
"Step definition "+step+
" invalid")
239 self.stepKeys.append(stepName)
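# --- Illustrative sketch (not part of the original module) ---------------------
# Standalone illustration of the --step parsing convention used above; the step
# string here is hypothetical.
def _example_parse_step_option(step_option='RAW2DIGI,RECO:localreco+globalreco,DQM:@standardDQM'):
    stepMap = {}
    for step in step_option.split(','):
        if step == '':
            continue
        stepParts = step.split(':')
        stepName = stepParts[0]
        if len(stepParts) == 1:
            stepMap[stepName] = ''                                    # use the default sequence
        elif len(stepParts) == 2:
            stepMap[stepName] = stepParts[1]                          # explicit sequence spec
        elif len(stepParts) == 3:
            stepMap[stepName] = (stepParts[2].split('+'), stepParts[1])
        else:
            raise ValueError("Step definition " + step + " invalid")
    return stepMap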
if hasattr(self._options, "no_output_flag") and self._options.no_output_flag:
Function to add the igprof profile service so that you can dump in the middle

profileOpts = self._options.profile.split(':')
profilerInterval = 100
profilerFormat = None
profilerJobFormat = None

startEvent = profileOpts.pop(0)
if not startEvent.isdigit():
    raise Exception("%s is not a number" % startEvent)
profilerStart = int(startEvent)

eventInterval = profileOpts.pop(0)
if not eventInterval.isdigit():
    raise Exception("%s is not a number" % eventInterval)
profilerInterval = int(eventInterval)

profilerFormat = profileOpts.pop(0)

if not profilerFormat:
    profilerFormat = "%s___%s___%%I.gz" % (
        self._options.evt_type.replace("_cfi", ""),
        str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
        str(self._options.datatier) + str(self._options.profileTypeLabel))

if not profilerJobFormat and profilerFormat.endswith(".gz"):
    profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
elif not profilerJobFormat:
    profilerJobFormat = profilerFormat + "_EndOfJob.gz"

return (profilerStart, profilerInterval, profilerFormat, profilerJobFormat)
includeFile = includeFile.replace('/', '.')
self.process.load(includeFile)
return sys.modules[includeFile]

"""helper routine to load and memorize imports"""
includeFile = includeFile.replace('/', '.')
self.imports.append(includeFile)
self.process.load(includeFile)
return sys.modules[includeFile]

"""helper routine to remember replace statements"""
self.additionalCommands.append(command)
if not command.strip().startswith("#"):
    exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])", r"\1self.process\3", command))
if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
    self.process.options = cms.untracked.PSet(Rethrow = cms.untracked.vstring('ProductNotFound'), fileMode = cms.untracked.string('FULLMERGE'))
else:
    self.process.options = cms.untracked.PSet()
self.addedObjects.append(("", "options"))

if self._options.lazy_download:
    self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                             stats = cms.untracked.bool(True),
                                             enable = cms.untracked.bool(True),
                                             cacheHint = cms.untracked.string("lazy-download"),
                                             readHint = cms.untracked.string("read-ahead-buffered"))
    self.addedObjects.append(("Setup lazy download", "AdaptorConfig"))

if self._options.profile:
    self.process.IgProfService = cms.Service("IgProfService",
                                             reportFirstEvent = cms.untracked.int32(start),
                                             reportEventInterval = cms.untracked.int32(interval),
                                             reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s" % (eventFormat)),
                                             reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s" % (jobFormat)))
    self.addedObjects.append(("Setup IGProf Service for profiling", "IgProfService"))

"""Here we decide how many evts will be processed"""
self.process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(int(self._options.number)))
if self._options.number_out:
    self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
self.addedObjects.append(("", "maxEvents"))
374 """Here the source is built. Priority: file, generator""" 375 self.addedObjects.append((
"Input source",
"source"))
377 def filesFromOption(self):
378 for entry
in self._options.filein.split(
','):
380 if entry.startswith(
"filelist:"):
382 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
383 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
385 self.process.source.fileNames.append(self._options.dirin+entry)
386 if self._options.secondfilein:
387 if not hasattr(self.process.source,
"secondaryFileNames"):
388 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
389 for entry
in self._options.secondfilein.split(
','):
391 if entry.startswith(
"filelist:"):
392 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
393 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
394 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
396 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
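# --- Illustrative sketch (not part of the original module) ---------------------
# Each --filein / --secondfilein entry is dispatched on its prefix, as in
# filesFromOption above: "filelist:" points to a text file list, "dbs:"/"das:"
# triggers a DAS query, and anything else is used directly as a logical file
# name.  A pure-Python sketch of that routing (entries hypothetical):
def _example_route_filein(entries=('filelist:files.txt', 'das:/SomePD/SomeEra/AOD', '/store/data/file.root')):
    routed = []
    for entry in entries:
        if entry.startswith('filelist:'):
            routed.append(('list', entry[9:]))
        elif entry.startswith('dbs:') or entry.startswith('das:'):
            routed.append(('das', 'file dataset = %s' % entry[4:]))
        else:
            routed.append(('lfn', entry))
    return routed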
if self._options.filein or self._options.dasquery:
    if self._options.filetype == "EDM":
        self.process.source = cms.Source("PoolSource",
                                         fileNames = cms.untracked.vstring(),
                                         secondaryFileNames = cms.untracked.vstring())
        filesFromOption(self)
    elif self._options.filetype == "DAT":
        self.process.source = cms.Source("NewEventStreamFileReader", fileNames = cms.untracked.vstring())
        filesFromOption(self)
    elif self._options.filetype == "LHE":
        self.process.source = cms.Source("LHESource", fileNames = cms.untracked.vstring())
        if self._options.filein.startswith("lhe:"):
            args = self._options.filein.split(':')
            print('LHE input from article ', article)
            location = '/store/lhe/'
            textOfFiles = os.popen('cmsLHEtoEOSManager.py -l ' + article)
            for line in textOfFiles:
                for fileName in [x for x in line.split() if '.lhe' in x]:
                    self.process.source.fileNames.append(location + article + '/' + fileName)

            print('Issue to load LHE files, please check and try again.')

            if len(self.process.source.fileNames) == 0:
                print('Issue with empty filename, but can pass line check')

            self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
        else:
            filesFromOption(self)
    elif self._options.filetype == "DQM":
        self.process.source = cms.Source("DQMRootSource",
                                         fileNames = cms.untracked.vstring())
        filesFromOption(self)
    elif self._options.filetype == "DQMDAQ":
        self.process.source = cms.Source("DQMStreamerReader")

    if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
        self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

if self._options.dasquery != '':
    self.process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring(), secondaryFileNames = cms.untracked.vstring())
    filesFromDASQuery(self._options.dasquery, self._options.dasoption, self.process.source)

    if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
        self.process.source.processingMode = cms.untracked.string("RunsAndLumis")

if 'GEN' in self.stepMap.keys() and not self._options.filetype == "LHE":
    if self._options.inputCommands:
        self._options.inputCommands += ',drop LHEXMLStringProduct_*_*_*,'
    else:
        self._options.inputCommands = 'keep *, drop LHEXMLStringProduct_*_*_*,'

if self.process.source and self._options.inputCommands and not self._options.filetype == "LHE":
    if not hasattr(self.process.source, 'inputCommands'): self.process.source.inputCommands = cms.untracked.vstring()
    for command in self._options.inputCommands.split(','):
        command = command.strip()
        if command == '': continue
        self.process.source.inputCommands.append(command)
    if not self._options.dropDescendant:
        self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)

if self._options.lumiToProcess:
    import FWCore.PythonUtilities.LumiList as LumiList
    self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().split(','))

if 'GEN' in self.stepMap.keys() or 'LHE' in self.stepMap or (not self._options.filein and hasattr(self._options, "evt_type")):
    if self.process.source is None:
        self.process.source = cms.Source("EmptySource")

if self._options.runsAndWeightsForMC or self._options.runsScenarioForMC:
    if not self._options.isMC:
        raise Exception("options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
    if self._options.runsAndWeightsForMC:
    else:
        from Configuration.StandardSequences.RunsAndWeights import RunsAndWeights
        if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
            __import__(RunsAndWeights[self._options.runsScenarioForMC])
            self.runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
        else:
            self.runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]

    import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun
    self.additionalCommands.append('import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
    self.additionalCommands.append('ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)' % (self.runsAndWeights))
502 """ Add output module to the process """ 504 if self._options.outputDefinition:
505 if self._options.datatier:
506 print(
"--datatier & --eventcontent options ignored")
509 outList = eval(self._options.outputDefinition)
510 for (id,outDefDict)
in enumerate(outList):
511 outDefDictStr=outDefDict.__str__()
512 if not isinstance(outDefDict,dict):
513 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
515 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
518 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
519 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
520 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
521 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
522 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
524 if not theModuleLabel:
525 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
526 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
527 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 529 for name
in tryNames:
530 if not hasattr(self.
process,name):
533 if not theModuleLabel:
534 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
536 defaultFileName=self._options.outfile_name
538 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
540 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
541 if not theFileName.endswith(
'.root'):
545 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
546 if theStreamType==
'DQMIO': theStreamType=
'DQM' 547 if theStreamType==
'ALL':
548 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
550 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
554 if theStreamType==
'ALCARECO' and not theFilterName:
555 theFilterName=
'StreamALCACombined' 558 CppType=
'PoolOutputModule' 559 if self._options.timeoutOutput:
560 CppType=
'TimeoutPoolOutputModule' 561 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 562 output = cms.OutputModule(CppType,
563 theEventContent.clone(),
564 fileName = cms.untracked.string(theFileName),
565 dataset = cms.untracked.PSet(
566 dataTier = cms.untracked.string(theTier),
567 filterName = cms.untracked.string(theFilterName))
569 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
570 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
571 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
572 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
574 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
577 if not hasattr(output,
'SelectEvents'):
578 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
580 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
583 if hasattr(self.
process,theModuleLabel):
584 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
586 setattr(self.
process,theModuleLabel,output)
587 outputModule=getattr(self.
process,theModuleLabel)
588 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
589 path=getattr(self.
process,theModuleLabel+
'_step')
590 self.schedule.append(path)
592 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
593 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 595 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
596 if theExtraOutputCommands:
597 if not isinstance(theExtraOutputCommands,list):
598 raise Exception(
"extra ouput command in --option must be a list of strings")
599 if hasattr(self.
process,theStreamType+
"EventContent"):
600 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
602 outputModule.outputCommands.extend(theExtraOutputCommands)
604 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
streamTypes = self._options.eventcontent.split(',')
tiers = self._options.datatier.split(',')
if not self._options.outputDefinition and len(streamTypes) != len(tiers):
    raise Exception("number of event content arguments does not match number of datatier arguments")

if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

for i, (streamType, tier) in enumerate(zip(streamTypes, tiers)):
    if streamType == '': continue
    if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
    if streamType == 'DQMIO': streamType = 'DQM'
    eventContent = streamType
    if streamType == "NANOEDMAOD":
        eventContent = "NANOAOD"
    elif streamType == "NANOEDMAODSIM":
        eventContent = "NANOAODSIM"
    theEventContent = getattr(self.process, eventContent + "EventContent")

    theFileName = self._options.outfile_name
    theFilterName = self._options.filtername
    theFileName = self._options.outfile_name.replace('.root', '_in' + streamType + '.root')
    theFilterName = self._options.filtername

    CppType = 'PoolOutputModule'
    if self._options.timeoutOutput:
        CppType = 'TimeoutPoolOutputModule'
    if streamType == 'DQM' and tier == 'DQMIO': CppType = 'DQMRootOutputModule'
    if "NANOAOD" in streamType: CppType = 'NanoAODOutputModule'
    output = cms.OutputModule(CppType,
                              fileName = cms.untracked.string(theFileName),
                              dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                           filterName = cms.untracked.string(theFilterName)))

    if hasattr(self.process, "generation_step") and streamType != 'LHE':
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
    if hasattr(self.process, "filtering_step"):
        output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

    if streamType == 'ALCARECO':
        output.dataset.filterName = cms.untracked.string('StreamALCACombined')

    if "MINIAOD" in streamType:
        from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

    outputModuleName = streamType + 'output'
    setattr(self.process, outputModuleName, output)
    outputModule = getattr(self.process, outputModuleName)
    setattr(self.process, outputModuleName + '_step', cms.EndPath(outputModule))
    path = getattr(self.process, outputModuleName + '_step')
    self.schedule.append(path)

    if self._options.outputCommands and streamType != 'DQM':
        for evct in self._options.outputCommands.split(','):
            if not evct: continue
            self.executeAndRemember("process.%s.outputCommands.append('%s')" % (outputModuleName, evct.strip()))

    if not self._options.inlineEventContent:
        tmpstreamType = streamType
        if "NANOEDM" in tmpstreamType:
            tmpstreamType = tmpstreamType.replace("NANOEDM", "NANO")
        def doNotInlineEventContent(instance, label = "process." + tmpstreamType + "EventContent.outputCommands"):
            return label
        outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

    result += "\nprocess." + outputModuleName + " = " + outputModule.dumpPython()
Add selected standard sequences to the process

if self._options.pileup:
    pileupSpec = self._options.pileup.split(',')[0]

    from Configuration.StandardSequences.Mixing import Mixing, defineMixing
    if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
        message = pileupSpec + ' is not a known mixing scenario:\n available are: ' + '\n'.join(Mixing.keys())

    if '.' in pileupSpec:
        mixingDict = {'file': pileupSpec}
    elif pileupSpec.startswith('file:'):
        mixingDict = {'file': pileupSpec[5:]}
    else:
        mixingDict = copy.copy(Mixing[pileupSpec])
    if len(self._options.pileup.split(',')) > 1:
        mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',') + 1:]))

    if 'file:' in pileupSpec:
        self.process.load(mixingDict['file'])
        print("inlining mixing module configuration")
        self._options.inlineObjets += ',mix'

    mixingDict.pop('file')
    if not "DATAMIX" in self.stepMap.keys():
        if self._options.pileup_input:
            if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                mixingDict['F'] = filesFromDASQuery('file dataset = %s' % (self._options.pileup_input[4:],), self._options.pileup_dasoption)[0]
            elif self._options.pileup_input.startswith("filelist:"):
                mixingDict['F'] = (filesFromList(self._options.pileup_input[9:]))[0]
            else:
                mixingDict['F'] = self._options.pileup_input.split(',')

    for command in specialization:

    if len(mixingDict) != 0:
        raise Exception('unused mixing specification: ' + mixingDict.keys().__str__())
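# --- Illustrative sketch (not part of the original module) ---------------------
# The --pileup value parsed above is "<scenario>[,<dict of overrides>]": the
# first comma-separated field selects a Mixing key (or a file), and everything
# after the first comma is eval'ed and merged into the mixing dictionary.  A
# standalone sketch with a hypothetical specification:
def _example_parse_pileup_option(pileup="AVE_35_BX_25ns,{'N': 20}"):
    pileupSpec = pileup.split(',')[0]
    overrides = {}
    if len(pileup.split(',')) > 1:
        overrides = eval(pileup[pileup.find(',') + 1:])
    return pileupSpec, overrides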
if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:

print("Geometry option", self._options.geometry, "unknown.")

stepSpec = self.stepMap[stepName]
print("Step:", stepName, "Spec:", stepSpec)
if stepName.startswith('re'):
    if stepName[2:] not in self._options.donotDropOnInput:
        self._options.inputEventContent = '%s,%s' % (stepName.upper(), self._options.inputEventContent)
    stepName = stepName[2:]
if stepSpec == '':
    getattr(self, "prepare_" + stepName)(sequence = getattr(self, stepName + "DefaultSeq"))
elif isinstance(stepSpec, list):
    getattr(self, "prepare_" + stepName)(sequence = '+'.join(stepSpec))
elif isinstance(stepSpec, tuple):
    getattr(self, "prepare_" + stepName)(sequence = ','.join([stepSpec[1], '+'.join(stepSpec[0])]))
else:
    raise ValueError("Invalid step definition")

if self._options.restoreRNDSeeds != False:
    if self._options.restoreRNDSeeds == True:
        self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
    else:
        self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")' % (self._options.restoreRNDSeeds))
    if self._options.inputEventContent or self._options.inputCommands:
        if self._options.inputCommands:
            self._options.inputCommands += 'keep *_randomEngineStateProducer_*_*,'
        else:
            self._options.inputCommands = 'keep *_randomEngineStateProducer_*_*,'

if self._options.inputEventContent:
    def dropSecondDropStar(iec):

    if not hasattr(self.process.source, 'inputCommands'): self.process.source.inputCommands = cms.untracked.vstring()
    for evct in self._options.inputEventContent.split(','):
        if evct == '': continue
        theEventContent = getattr(self.process, evct + "EventContent")
        if hasattr(theEventContent, 'outputCommands'):
            self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
        if hasattr(theEventContent, 'inputCommands'):
            self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

    dropSecondDropStar(self.process.source.inputCommands)

    if not self._options.dropDescendant:
        self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
809 """Add conditions to the process""" 810 if not self._options.conditions:
return 812 if 'FrontierConditions_GlobalTag' in self._options.conditions:
813 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
814 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
818 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
819 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
820 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
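# --- Illustrative sketch (not part of the original module) ---------------------
# For a hypothetical --conditions value, the two commands remembered above would
# appear in the dumped configuration as:
def _example_globaltag_commands(conditions='auto:run2_data', custom_conditions=''):
    return ['from Configuration.AlCa.GlobalTag import GlobalTag',
            'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(conditions), repr(custom_conditions))]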
824 """Include the customise code """ 828 for c
in self._options.customisation_file:
829 custOpt.extend(c.split(
","))
831 for c
in self._options.customisation_file_unsch:
832 custOpt.extend(c.split(
","))
838 raise Exception(
"more than . in the specification:"+opt)
839 fileName=opt.split(
'.')[0]
840 if opt.count(
'.')==0: rest=
'customise' 842 rest=opt.split(
'.')[1]
843 if rest==
'py': rest=
'customise' 845 if fileName
in custMap:
846 custMap[fileName].extend(rest.split(
'+'))
848 custMap[fileName]=rest.split(
'+')
853 final_snippet=
'\n# customisation of the process.\n' 857 allFcn.extend(custMap[opt])
859 if allFcn.count(fcn)!=1:
860 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
864 packageName = f.replace(
".py",
"").
replace(
"/",
".")
865 __import__(packageName)
866 package = sys.modules[packageName]
869 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
871 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 872 if self._options.inline_custom:
873 for line
in file(customiseFile,
'r'): 874 if "import FWCore.ParameterSet.Config" in line:
876 final_snippet += line
878 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
879 for fcn
in custMap[f]:
880 print(
"customising the process with",fcn,
"from",f)
881 if not hasattr(package,fcn):
883 raise Exception(
"config "+f+
" has no function "+fcn)
887 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
888 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
891 final_snippet +=
'\n# End of customisation functions\n' 897 final_snippet=
'\n# Customisation from command line\n' 898 if self._options.customise_commands:
900 for com
in self._options.customise_commands.split(
'\\n'):
901 com=string.lstrip(com)
903 final_snippet +=
'\n'+com
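# --- Illustrative sketch (not part of the original module) ---------------------
# Each --customise entry above is "package/module_cff.function" (the function
# defaults to 'customise' when omitted) and several functions can be chained
# with '+'.  A standalone sketch of the file-to-functions map that gets built,
# for hypothetical entries:
def _example_parse_customisation(entries=('MyPkg/MySubPkg/myCustomise_cff.addMonitoring+tightenCuts', 'OtherPkg/other_cff')):
    custMap = {}
    for opt in entries:
        fileName = opt.split('.')[0]
        rest = 'customise' if opt.count('.') == 0 else opt.split('.')[1]
        if rest == 'py':
            rest = 'customise'
        custMap.setdefault(fileName, []).extend(rest.split('+'))
    return custMap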
if self._options.particleTable not in defaultOptions.particleTableList:
    print('Invalid particle table provided. Options are:')
    print(defaultOptions.particleTable)

self.loadAndRemember('SimGeneral.HepPDTESSource.' + self._options.particleTable + '_cfi')

if self._options.isRepacked: self.RAW2DIGIDefaultCFF = "Configuration/StandardSequences/RawToDigi_DataMapper_cff"

if "DATAMIX" in self.stepMap.keys():
    self.L1EMDefaultCFF = 'Configuration/StandardSequences/SimL1EmulatorDM_cff'

if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):

if not self._options.beamspot:
    self._options.beamspot = VtxSmearedDefaultKey

if self._options.isMC == True:
    self.RECODefaultCFF = "Configuration/StandardSequences/Reconstruction_cff"
    self.PATDefaultCFF = "Configuration/StandardSequences/PATMC_cff"
    self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsMC_cff"

self._options.beamspot = None

if self._options.scenario == 'cosmics':
    self._options.pileup = 'Cosmics'
    self.DIGIDefaultCFF = "Configuration/StandardSequences/DigiCosmics_cff"
    self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionCosmics_cff"
    self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsCosmics_cff"
    if self._options.isMC == True:

if self._options.scenario == 'HeavyIons':
    if not self._options.beamspot:
        self._options.beamspot = VtxSmearedHIDefaultKey
    self.RECODefaultCFF = "Configuration/StandardSequences/ReconstructionHeavyIons_cff"
    self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
    self.SKIMDefaultCFF = "Configuration/StandardSequences/SkimsHeavyIons_cff"
    if self._options.isMC == True:

if self._options.isData:
    if self._options.magField == defaultOptions.magField:
        print("magnetic field option forced to: AutoFromDBCurrent")
        self._options.magField = 'AutoFromDBCurrent'

self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_' + self._options.magField.replace('.', '') + '_cff'
self.magFieldCFF = self.magFieldCFF.replace("__", '_')

if self._options.fast:
    if 'start' in self._options.conditions.lower():
        self.GeometryCFF = 'FastSimulation/Configuration/Geometries_START_cff'
    else:
        self.GeometryCFF = 'FastSimulation/Configuration/Geometries_MC_cff'

def inGeometryKeys(opt):
    from Configuration.StandardSequences.GeometryConf import GeometryConf
    if opt in GeometryConf:
        return GeometryConf[opt]

geoms = self._options.geometry.split(',')
if len(geoms) == 1: geoms = inGeometryKeys(geoms[0]).split(',')

if '/' in geoms[1] or '_cff' in geoms[1]:

self.GeometryCFF = 'Configuration/Geometry/Geometry' + geoms[1] + '_cff'

if (geoms[0].startswith('DB:')):

if '/' in geoms[0] or '_cff' in geoms[0]:

simGeometry = geoms[0]
if self._options.gflash == True:
    self.SimGeometryCFF = 'Configuration/Geometry/Geometry' + geoms[0] + 'GFlash_cff'
else:
    self.SimGeometryCFF = 'Configuration/Geometry/Geometry' + geoms[0] + '_cff'

if simGeometry not in defaultOptions.geometryExtendedOptions:
    self.SIMDefaultCFF = "Configuration/StandardSequences/SimIdeal_cff"

if self._options.scenario == 'nocoll' or self._options.scenario == 'cosmics':
    self.SIMDefaultCFF = "Configuration/StandardSequences/SimNOBEAM_cff"
    self._options.beamspot = 'NoSmear'

if self._options.fast:
    self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
    self.RECODefaultCFF = 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

if self._options.pileup == 'default':
    from Configuration.StandardSequences.Mixing import MixingDefaultKey
    self._options.pileup = MixingDefaultKey

if self._options.isData:
    self._options.pileup = None
"PoolOutputModule")
1131 if stream.selectEvents.parameters_().__len__()!=0:
1132 output.SelectEvents = stream.selectEvents
1134 output.SelectEvents = cms.untracked.PSet()
1135 output.SelectEvents.SelectEvents=cms.vstring()
1136 if isinstance(stream.paths,tuple):
1137 for path
in stream.paths:
1138 output.SelectEvents.SelectEvents.append(path.label())
1140 output.SelectEvents.SelectEvents.append(stream.paths.label())
1144 if isinstance(stream.content,str):
1145 evtPset=getattr(self.process,stream.content)
1146 for p
in evtPset.parameters_():
1147 setattr(output,p,getattr(evtPset,p))
1148 if not self._options.inlineEventContent:
1149 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1151 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1153 output.outputCommands = stream.content
1156 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1158 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1159 filterName = cms.untracked.string(stream.name))
1161 if self._options.filtername:
1162 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1165 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1167 if workflow
in (
"producers,full"):
1168 if isinstance(stream.paths,tuple):
1169 for path
in stream.paths:
1170 self.schedule.append(path)
1172 self.schedule.append(stream.paths)
1176 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1177 self.additionalOutputs[name] = output
1178 setattr(self.process,name,output)
1180 if workflow ==
'output':
1182 filterList = output.SelectEvents.SelectEvents
1183 for i, filter
in enumerate(filterList):
1184 filterList[i] = filter+
":"+self._options.triggerResultsProcess
if (len(sequence.split('.')) == 1):
elif (len(sequence.split('.')) == 2):
    sequence = sequence.split('.')[1]
else:
    print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
    print(sequence, "not recognized")

for i, s in enumerate(seq.split('*')):
    if i == 0:
        setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, s)))
    else:
        p = getattr(self.process, prefix)
        tmp = getattr(self.process, s)
        if isinstance(tmp, cms.Task):

self.schedule.append(getattr(self.process, prefix))

self.conditionalPaths.append(prefix)
setattr(self.process, prefix, getattr(cms, what)(getattr(self.process, seq)))
self.schedule.append(getattr(self.process, prefix))

for i, s in enumerate(seq.split('+')):
    setattr(self.process, sn, getattr(cms, what)(getattr(self.process, s)))
    self.schedule.append(getattr(self.process, sn))
1245 """ Enrich the process with alca streams """ 1247 sequence = sequence.split(
'.')[-1]
1250 alcaList = sequence.split(
"+")
1252 from Configuration.AlCa.autoAlca
import autoAlca
1256 for name
in alcaConfig.__dict__:
1257 alcastream = getattr(alcaConfig,name)
1258 shortName = name.replace(
'ALCARECOStream',
'')
1259 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1260 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1261 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1262 self.AlCaPaths.append(shortName)
1263 if 'DQM' in alcaList:
1264 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1265 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1267 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1270 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1271 if isinstance(alcastream.paths,tuple):
1272 for path
in alcastream.paths:
1277 for i
in range(alcaList.count(shortName)):
1278 alcaList.remove(shortName)
1281 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1282 path = getattr(alcaConfig,name)
1283 self.schedule.append(path)
1284 alcaList.remove(
'DQM')
1286 if isinstance(alcastream,cms.Path):
1288 self.blacklist_paths.append(alcastream)
1291 if len(alcaList) != 0:
1293 for name
in alcaConfig.__dict__:
1294 alcastream = getattr(alcaConfig,name)
1295 if isinstance(alcastream,cms.FilteredStream):
1296 available.append(name.replace(
'ALCARECOStream',
''))
1297 print(
"The following alcas could not be found "+
str(alcaList))
1298 print(
"available ",available)
1300 raise Exception(
"The following alcas could not be found "+
str(alcaList))
loadFragment = self._options.evt_type.replace('.py', '',).replace('.', '_').replace('python/', '').replace('/', '.')
print("Loading lhe fragment from", loadFragment)
__import__(loadFragment)
self.process.load(loadFragment)

self._options.inlineObjets += ',' + sequence

getattr(self.process, sequence).nEvents = int(self._options.number)

self.process.lhe_step = cms.Path(getattr(self.process, sequence))
self.excludedPaths.append("lhe_step")
self.schedule.append(self.process.lhe_step)
1320 """ load the fragment of generator configuration """ 1325 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1327 if not '/' in loadFragment:
1328 loadFragment=
'Configuration.Generator.'+loadFragment
1330 loadFragment=loadFragment.replace(
'/',
'.')
1332 print(
"Loading generator fragment from",loadFragment)
1333 __import__(loadFragment)
1337 if not (self._options.filein
or self._options.dasquery):
1338 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1341 generatorModule=sys.modules[loadFragment]
1342 genModules=generatorModule.__dict__
1348 if self._options.hideGen:
1351 self.process.load(loadFragment)
1353 import FWCore.ParameterSet.Modules
as cmstypes
1354 for name
in genModules:
1355 theObject = getattr(generatorModule,name)
1356 if isinstance(theObject, cmstypes._Module):
1357 self._options.inlineObjets=name+
','+self._options.inlineObjets
1358 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1359 self._options.inlineObjets+=
','+name
1361 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1362 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1364 elif 'generator' in genModules:
1367 """ Enrich the schedule with the rest of the generation step """ 1369 genSeqName=sequence.split(
'.')[-1]
1373 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1374 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1377 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1379 if self._options.scenario ==
'HeavyIons':
1380 if self._options.pileup==
'HiMixGEN':
1381 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1383 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1385 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1386 self.schedule.append(self.process.generation_step)
1389 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1395 """ Enrich the schedule with the summary of the filter step """ 1402 """ Enrich the schedule with the simulation step""" 1404 if not self._options.fast:
1405 if self._options.gflash==
True:
1408 if self._options.magField==
'0T':
1411 if self._options.magField==
'0T':
1412 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1418 """ Enrich the schedule with the digitisation step""" 1421 if self._options.gflash==
True:
1422 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1424 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1425 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1427 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource' and not self._options.filetype ==
"LHE":
1428 if self._options.inputEventContent==
'':
1429 self._options.inputEventContent=
'REGEN' 1431 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1438 """ Enrich the schedule with the crossing frame writer step""" 1444 """ Enrich the schedule with the digitisation step""" 1448 if self._options.pileup_input:
1450 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1451 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1452 elif self._options.pileup_input.startswith(
"filelist:"):
1453 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1455 theFiles=self._options.pileup_input.split(
',')
1457 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1472 """ Enrich the schedule with the L1 simulation step""" 1473 assert(sequence ==
None)
1479 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1480 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1481 if sequence
in supported:
1482 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1483 if self._options.scenario ==
'HeavyIons':
1487 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1492 """ Enrich the schedule with the HLT simulation step""" 1494 print(
"no specification of the hlt menu has been given, should never happen")
1495 raise Exception(
'no HLT sequence provided')
1499 from Configuration.HLT.autoHLT
import autoHLT
1502 sequence = autoHLT[key]
1504 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1510 if self._options.scenario ==
'HeavyIons':
1511 optionsForHLT[
'type'] =
'HIon' 1513 optionsForHLT[
'type'] =
'GRun' 1514 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1515 if sequence ==
'run,fromSource':
1516 if hasattr(self.process.source,
'firstRun'):
1517 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1518 elif hasattr(self.process.source,
'setRunNumber'):
1519 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1521 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1523 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1527 if self._options.isMC:
1528 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1530 if self._options.name !=
'HLT':
1531 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1532 self.additionalCommands.append(
'process = ProcessName(process)')
1533 self.additionalCommands.append(
'')
1534 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1537 self.schedule.append(self.process.HLTSchedule)
1538 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1541 if self._options.fast:
1542 if not hasattr(self.
process,
'HLTEndSequence'):
1543 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
seqReco = sequence.split(',')[1]
seqDigi = sequence.split(',')[0]

print("RAW2RECO requires two specifications", sequence, "insufficient")

self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")

for filt in allMetFilterPaths:
    self.schedule.append(getattr(self.process, 'Flag_' + filt))

''' Enrich the schedule with L1 HW validation '''
print('\n\n\n DEPRECATED this has no action \n\n\n')

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with L1 reconstruction '''

''' Enrich the schedule with a user defined filter sequence '''
filterConfig = self.load(sequence.split('.')[0])
filterSeq = sequence.split('.')[-1]

class PrintAllModules(object):
    def enter(self, visitee):
        label = visitee.label()

    def leave(self, v): pass

expander = PrintAllModules()

self._options.inlineObjets += ',' + expander.inliner
self._options.inlineObjets += ',' + filterSeq
1636 print(
"ERROR: this step is only implemented for FastSim")
1639 self.
scheduleSequence(sequence.split(
'.')[-1],
'reconstruction_befmix_step')
1643 ''' Enrich the schedule with PAT ''' 1646 self.labelsToAssociate.append(
'patTask')
1647 if not self._options.runUnscheduled:
1648 raise Exception(
"MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1649 if self._options.isData:
1650 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1652 if self._options.fast:
1653 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1655 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1657 if self._options.hltProcess:
1658 if len(self._options.customise_commands) > 1:
1659 self._options.customise_commands = self._options.customise_commands +
" \n" 1660 self._options.customise_commands = self._options.customise_commands +
"process.patTrigger.processName = \""+self._options.hltProcess+
"\"\n" 1661 self._options.customise_commands = self._options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1662 self._options.customise_commands = self._options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1669 ''' Enrich the schedule with PATGEN ''' 1671 self.labelsToAssociate.append(
'patGENTask')
1672 if not self._options.runUnscheduled:
1673 raise Exception(
"MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
1674 if self._options.isData:
1675 raise Exception(
"PATGEN step can only run on MC")
1679 ''' Enrich the schedule with NANO ''' 1682 custom =
"nanoAOD_customizeData" if self._options.isData
else "nanoAOD_customizeMC" 1683 if self._options.runUnscheduled:
1684 self._options.customisation_file_unsch.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1686 self._options.customisation_file.insert(0,
"PhysicsTools/NanoAOD/nano_cff."+custom)
1687 if self._options.hltProcess:
1688 if len(self._options.customise_commands) > 1:
1689 self._options.customise_commands = self._options.customise_commands +
" \n" 1690 self._options.customise_commands = self._options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+
"' )\n" 1694 ''' Enrich the schedule with event interpretation ''' 1695 from Configuration.StandardSequences.EventInterpretation
import EventInterpretation
1696 if sequence
in EventInterpretation:
1698 sequence =
'EIsequence' 1700 raise Exception(
'Cannot set %s event interpretation'%( sequence) )
1706 ''' Enrich the schedule with skimming fragments''' 1708 sequence = sequence.split(
'.')[-1]
1710 skimlist=sequence.split(
'+')
1712 from Configuration.Skimming.autoSkim
import autoSkim
1716 for skim
in skimConfig.__dict__:
1717 skimstream = getattr(skimConfig,skim)
1718 if isinstance(skimstream,cms.Path):
1720 self.blacklist_paths.append(skimstream)
1721 if (
not isinstance(skimstream,cms.FilteredStream)):
1723 shortname = skim.replace(
'SKIMStream',
'')
1724 if (sequence==
"all"):
1726 elif (shortname
in skimlist):
1729 if self._options.datatier==
'DQM':
1731 skimstreamDQM = cms.FilteredStream(
1732 responsible = skimstream.responsible,
1733 name = skimstream.name+
'DQM',
1734 paths = skimstream.paths,
1735 selectEvents = skimstream.selectEvents,
1736 content = self._options.datatier+
'EventContent',
1737 dataTier = cms.untracked.string(self._options.datatier)
1740 for i
in range(skimlist.count(shortname)):
1741 skimlist.remove(shortname)
1745 if (skimlist.__len__()!=0
and sequence!=
"all"):
1746 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1747 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
''' Enrich the schedule with a user defined sequence '''

""" Enrich the schedule with the postreco step """

print(sequence, "in preparing validation")

from Validation.Configuration.autoValidation import autoValidation

sequence = sequence.split('.')[-1]
if sequence.find(',') != -1:
    prevalSeqName = sequence.split(',')[0].split('+')
    valSeqName = sequence.split(',')[1].split('+')

prevalSeqName = sequence.split('+')
valSeqName = sequence.split('+')

postfix = '_' + sequence
prevalSeqName = ['prevalidation' + postfix]
valSeqName = ['validation' + postfix]
if not hasattr(self.process, valSeqName[0]):
    valSeqName = [sequence]

if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
    for s in valSeqName + prevalSeqName:

for (i, s) in enumerate(prevalSeqName):
    setattr(self.process, 'prevalidation_step%s' % NFI(i), cms.Path(getattr(self.process, s)))
    self.schedule.append(getattr(self.process, 'prevalidation_step%s' % NFI(i)))

for (i, s) in enumerate(valSeqName):
    setattr(self.process, 'validation_step%s' % NFI(i), cms.EndPath(getattr(self.process, s)))
    self.schedule.append(getattr(self.process, 'validation_step%s' % NFI(i)))

if not 'DIGI' in self.stepMap and not self._options.fast and not any(map(lambda s: s.startswith('genvalid'), valSeqName)):
    if self._options.restoreRNDSeeds == False and not self._options.restoreRNDSeeds == True:
        self._options.restoreRNDSeeds = True

if not 'DIGI' in self.stepMap and not self._options.fast:
    self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
    for (i, s) in enumerate(valSeqName):
        getattr(self.process, 'validation_step%s' % NFI(i)).insert(0, self.process.genstepfilter)
1834 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1835 It will climb down within PSets, VPSets and VInputTags to find its target""" 1836 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1843 if isinstance(pset, cms._Parameterizable):
1844 for name
in pset.parameters_().
keys():
1850 value = getattr(pset,name)
1851 type = value.pythonTypeName()
1852 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1853 self.
doIt(value,base+
"."+name)
1854 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1855 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1856 elif type
in (
'cms.string',
'cms.untracked.string'):
1860 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1861 for (i,n)
in enumerate(value):
1862 if not isinstance(n, cms.InputTag):
1869 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1870 for (i,n)
in enumerate(value):
1873 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1876 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1881 label = visitee.label()
1882 except AttributeError:
1883 label =
'<Module not in a Process>' 1885 label =
'other execption' 1886 self.
doIt(visitee, label)
print("Replacing all InputTag %s => %s" % (oldT, newT))

loadMe = 'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'

self.additionalCommands.append(loadMe)
self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)' % (sequence, oldT, newT))

if self._options.hltProcess:
    proc = self._options.hltProcess
else:
    proc = self.process.name_()
if proc == HLTprocess: return

print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))

if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
    self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))' % (sequence, HLTprocess, proc))
while '@' in repr(seqList) and level < maxLevel:
    for specifiedCommand in seqList:
        if specifiedCommand.startswith('@'):
            location = specifiedCommand[1:]
            if not location in mapping:
                raise Exception("Impossible to map " + location + " from " + repr(mapping))
            mappedTo = mapping[location]
            mappedTo = mappedTo[index]
            seqList.remove(specifiedCommand)
            seqList.extend(mappedTo.split('+'))

raise Exception("Could not fully expand " + repr(seqList) + " from " + repr(mapping))
sequenceList = sequence.split('.')[-1].split('+')
postSequenceList = sequence.split('.')[-1].split('+')
from DQMOffline.Configuration.autoDQM import autoDQM

if len(set(sequenceList)) != len(sequenceList):
    sequenceList = list(set(sequenceList))
    print("Duplicate entries for DQM, using", sequenceList)

pathName = 'dqmoffline_step'
for (i, sequence) in enumerate(sequenceList):
    if i != 0:
        pathName = 'dqmoffline_%d_step' % (i)

    if 'HLT' in self.stepMap.keys() or self._options.hltProcess:

    setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
    self.schedule.append(getattr(self.process, pathName))

    if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
        getattr(self.process, pathName).insert(0, self.process.genstepfilter)

pathName = 'dqmofflineOnPAT_step'
for (i, sequence) in enumerate(postSequenceList):
    if i != 0:
        pathName = 'dqmofflineOnPAT_%d_step' % (i)

    setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
    self.schedule.append(getattr(self.process, pathName))
1975 """ Enrich the process with harvesting step """ 1976 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1980 sequence = sequence.split(
'.')[-1]
1983 harvestingList = sequence.split(
"+")
1984 from DQMOffline.Configuration.autoDQM
import autoDQM
1985 from Validation.Configuration.autoValidation
import autoValidation
1987 combined_mapping = copy.deepcopy( autoDQM )
1988 combined_mapping.update( autoValidation )
1989 self.
expandMapping(harvestingList,combined_mapping,index=-1)
1991 if len(set(harvestingList))!=len(harvestingList):
1992 harvestingList=
list(set(harvestingList))
1993 print(
"Duplicate entries for HARVESTING, using",harvestingList)
1995 for name
in harvestingList:
1996 if not name
in harvestingConfig.__dict__:
1997 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
1999 harvestingstream = getattr(harvestingConfig,name)
2000 if isinstance(harvestingstream,cms.Path):
2001 self.schedule.append(harvestingstream)
2002 self.blacklist_paths.append(harvestingstream)
2003 if isinstance(harvestingstream,cms.Sequence):
2004 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2005 self.schedule.append(getattr(self.
process,name+
"_step"))
2011 """ Enrich the process with AlCaHarvesting step """ 2013 sequence=sequence.split(
".")[-1]
2016 harvestingList = sequence.split(
"+")
2020 from Configuration.AlCa.autoPCL
import autoPCL
2023 for name
in harvestingConfig.__dict__:
2024 harvestingstream = getattr(harvestingConfig,name)
2025 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2026 self.schedule.append(harvestingstream)
2027 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2028 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2029 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2030 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2032 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2033 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2034 harvestingList.remove(name)
2036 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2037 self.schedule.append(lastStep)
2039 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2040 print(
"The following harvesting could not be found : ", harvestingList)
2041 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)
2056 """ Add useful info for the production. """ 2057 self.process.configurationMetadata=cms.untracked.PSet\
2058 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2059 name=cms.untracked.string(
"Applications"),
2060 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2063 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
        self.pythonCfgCode += "# using: \n# " + __version__[1:-1] + "\n# " + __source__[1:-1] + '\n'
        self.pythonCfgCode += "# with command line options: " + self._options.arguments + '\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
        # ...
        modifiers = []
        modifierStrings = []
        modifierImports = []

        if hasattr(self._options, "era") and self._options.era:
            # ...
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(","):
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras, requestedEra))

        if hasattr(self._options, "procModifiers") and self._options.procModifiers:
            # ...
            for pm in self._options.procModifiers.split(','):
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.' + pm + '_cff import ' + pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.' + pm + '_cff'), pm))

        # ...
        self.pythonCfgCode += "process = cms.Process('" + self._options.name + "'"
        if len(modifierStrings) > 0:
            self.pythonCfgCode += ',' + ','.join(modifierStrings)
        self.pythonCfgCode += ')\n\n'
        # ...
        if len(modifiers) > 0:
            self.process = cms.Process(self._options.name, *modifiers)
        else:
            self.process = cms.Process(self._options.name)
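# Illustration (not part of ConfigBuilder): how the "process = cms.Process(...)" line of
# the dumped configuration is assembled from the requested eras and process modifiers.
# The era names below are only examples.
name = 'RECO'
modifier_strings = ['Run2_2018', 'run2_nanoAOD_106Xv2']

cfg_line = "process = cms.Process('" + name + "'"
if modifier_strings:
    cfg_line += ',' + ','.join(modifier_strings)
cfg_line += ')'

print(cfg_line)
# process = cms.Process('RECO',Run2_2018,run2_nanoAOD_106Xv2)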
2114 """ Prepare the configuration string and add missing pieces.""" 2126 outputModuleCfgCode=
"" 2127 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2132 self.
pythonCfgCode +=
"# import of standard configurations\n" 2137 if not hasattr(self.
process,
"configurationMetadata"):
2141 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2156 nl=sorted(self.additionalOutputs.keys())
2159 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2160 tmpOut = cms.EndPath(output)
2161 setattr(self.
process,name+
'OutPath',tmpOut)
2162 self.schedule.append(tmpOut)
        for object in self._options.inlineObjets.split(','):
            # ...
            if not hasattr(self.process, object):
                print('cannot inline -' + object + '- : not known')
            # ...
        for path in self.process.paths:
            # ...
        for endpath in self.process.endpaths:
            # ...

        # build the schedule statement for the dumped configuration
        result = "process.schedule = cms.Schedule("
        # ...
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process, "HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule) + 1:]
            pathNames = ['process.' + p.label_() for p in beforeHLT]
            result += ','.join(pathNames) + ')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.' + p.label_() for p in afterHLT]
            result += 'process.schedule.extend([' + ','.join(pathNames) + '])\n'
        else:
            pathNames = ['process.' + p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule(' + ','.join(pathNames) + ')\n'

        # ...
        self.process.schedule.associate(getattr(self.process, labelToAssociate))
        self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
        # ...
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"

        if self._options.nThreads != "1":
            # ...
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32(" + self._options.nThreads + ")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32(" + self._options.nStreams + ")\n"
            self.pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(" + self._options.nConcurrentLumis + ")\n"
            self.process.options.numberOfThreads = cms.untracked.uint32(int(self._options.nThreads))
            self.process.options.numberOfStreams = cms.untracked.uint32(int(self._options.nStreams))
            self.process.options.numberOfConcurrentLuminosityBlocks = cms.untracked.uint32(int(self._options.nConcurrentLumis))
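# Illustration (not part of ConfigBuilder): how the schedule statement of the dumped
# configuration is spliced around an HLTSchedule entry. The path labels are invented.
before_hlt = ['raw2digi_step', 'reconstruction_step']
after_hlt = ['dqmoffline_step', 'RECOoutput_step']

result = 'process.schedule = cms.Schedule('
result += ','.join('process.' + p for p in before_hlt) + ')\n'
result += 'process.schedule.extend(process.HLTSchedule)\n'
result += 'process.schedule.extend([' + ','.join('process.' + p for p in after_hlt) + '])\n'

print(result)
# process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step)
# process.schedule.extend(process.HLTSchedule)
# process.schedule.extend([process.dqmoffline_step,process.RECOoutput_step])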
        if self._options.isRepacked:
            # ...
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
            MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")

        # ...
        self.pythonCfgCode += '# filter all path with the production filter sequence\n'
        for path in self.process.paths:
            # ...
        if self._options.runUnscheduled:
            # ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            # ...
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            # ...

        if hasattr(self.process, "logErrorHarvester"):
            # ...
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            # ...
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            # ...
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        # ...
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        # ...

        imports = cms.specialImportRegistry.getSpecialImports()
        if len(imports) > 0:
            # insert the special imports right after the FWCore import line
            index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
            # ...
            index = self.pythonCfgCode.find("\n", index)
            # ...
        if self._options.io:
            # ...
            if not self._options.io.endswith('.io'): self._options.io += '.io'
            io = open(self._options.io, 'w')
            ioJson = {}
            if hasattr(self.process.source, "fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary'] = self.process.source.fileNames.value()
            if hasattr(self.process.source, "secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary'] = self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup'] = self._options.pileup_input[4:]
            for (o, om) in self.process.outputModules_().items():
                ioJson[o] = om.fileName.value()
            ioJson['GT'] = self.process.GlobalTag.globaltag.value()
            # ...
            io.write(json.dumps(ioJson))
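# Illustration (not part of ConfigBuilder): the kind of JSON summary written to the
# --io file. All file names and the global tag below are invented placeholders.
import json

ioJson = {
    'primary': ['/store/data/placeholder/raw.root'],
    'pileup': '/RelValMinBias/placeholder/PREMIX',
    'RECOoutput': 'output.root',
    'GT': 'auto:run2_data',
}

with open('demo.io', 'w') as io:
    io.write(json.dumps(ioJson))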
Referenced functions and methods (ConfigBuilder and imported helpers):
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
def anyOf(listOfKeys, dict, opt=None)
def convertToUnscheduled(proc)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
def prepare_DIGI(self, sequence=None)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")