from __future__ import print_function

__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '1'

    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython("process")
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    return "process."+name+" = " + theObject.dumpPython()+"\n"

    import FWCore.ParameterSet.Config as cms
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")

    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))

    if not hasattr(s,"fileNames"):
        s.fileNames=cms.untracked.vstring(prim)
    s.fileNames.extend(prim)
    if not hasattr(s,"secondaryFileNames"):
        s.secondaryFileNames=cms.untracked.vstring(sec)
    s.secondaryFileNames.extend(sec)

    print("found files: ",prim)
    raise Exception("There are no files in input from the file list")
    print("found parent files:",sec)
    import FWCore.ParameterSet.Config as cms

    print("the query is",query)
    while eC!=0 and count<3:
        print('Sleeping, then retrying DAS')
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True)
        tupleP = os.waitpid(p.pid, 0)
    print("DAS succeeded after",count,"attempts",eC)
    print("DAS failed 3 times- I give up")

    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")

    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))

    if not hasattr(s,"fileNames"):
        s.fileNames=cms.untracked.vstring(prim)
    s.fileNames.extend(prim)
    if not hasattr(s,"secondaryFileNames"):
        s.secondaryFileNames=cms.untracked.vstring(sec)
    s.secondaryFileNames.extend(sec)

    print("found files: ",prim)
    print("found parent files:",sec)
def anyOf(listOfKeys,dict,opt=None):
    raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
194 """The main building routines """ 196 def __init__(self, options, process = None, with_output = False, with_input = False ):
197 """options taken from old cmsDriver and optparse """ 199 options.outfile_name = options.dirout+options.fileout
203 if self._options.isData
and options.isMC:
204 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
209 if 'ENDJOB' in self._options.step:
210 if (hasattr(self.
_options,
"outputDefinition")
and \
211 self._options.outputDefinition !=
'' and \
212 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
213 (hasattr(self.
_options,
"datatier")
and \
214 self._options.datatier
and \
215 'DQMIO' in self._options.datatier):
216 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
217 self._options.step=self._options.step.replace(
',ENDJOB',
'')
222 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
225 for step
in self._options.step.split(
","):
226 if step==
'':
continue 227 stepParts = step.split(
":")
228 stepName = stepParts[0]
229 if stepName
not in stepList
and not stepName.startswith(
're'):
230 raise ValueError(
"Step "+stepName+
" unknown")
231 if len(stepParts)==1:
233 elif len(stepParts)==2:
235 elif len(stepParts)==3:
236 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
238 raise ValueError(
"Step definition "+step+
" invalid")
239 self.stepKeys.append(stepName)
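        # Illustrative sketch (not part of the original file): a --step specification such as
        #   "RAW2DIGI,RECO:localreco,DQM"
        # is split on ',' and then on ':', so each entry ends up in self.stepMap keyed by the
        # step name, with any part after ':' kept as the requested sequence specification.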
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:

        """Function to add the igprof profile service so that you can dump in the middle"""
        profileOpts = self._options.profile.split(':')
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        startEvent = profileOpts.pop(0)
        if not startEvent.isdigit():
            raise Exception("%s is not a number" % startEvent)
        profilerStart = int(startEvent)
        eventInterval = profileOpts.pop(0)
        if not eventInterval.isdigit():
            raise Exception("%s is not a number" % eventInterval)
        profilerInterval = int(eventInterval)
        profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%%I.gz" % (
                self._options.evt_type.replace("_cfi",""),
                str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
                str(self._options.datatier) + str(self._options.profileTypeLabel)

        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz","_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]

        """helper routine to remember replace statements"""
        self.additionalCommands.append(command)
        if not command.strip().startswith("#"):
            exec(re.sub(r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",r"\1self.process\3",command))
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options = cms.untracked.PSet( Rethrow = cms.untracked.vstring('ProductNotFound'),fileMode = cms.untracked.string('FULLMERGE'))
            self.process.options = cms.untracked.PSet( )
        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered")
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent = cms.untracked.int32(start),
                                                     reportEventInterval = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

        """Here we decide how many evts will be processed"""
        self.process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(int(self._options.number)))
        if self._options.number_out:
            self.process.maxEvents.output = cms.untracked.int32(int(self._options.number_out))
        self.addedObjects.append(("","maxEvents"))
374 """Here the source is built. Priority: file, generator""" 375 self.addedObjects.append((
"Input source",
"source"))
377 def filesFromOption(self):
378 for entry
in self._options.filein.split(
','):
380 if entry.startswith(
"filelist:"):
382 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
383 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
385 self.process.source.fileNames.append(self._options.dirin+entry)
386 if self._options.secondfilein:
387 if not hasattr(self.process.source,
"secondaryFileNames"):
388 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
389 for entry
in self._options.secondfilein.split(
','):
391 if entry.startswith(
"filelist:"):
392 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
393 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
394 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
396 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
398 if self._options.filein
or self._options.dasquery:
399 if self._options.filetype ==
"EDM":
400 self.process.source=cms.Source(
"PoolSource",
401 fileNames = cms.untracked.vstring(),
402 secondaryFileNames= cms.untracked.vstring())
403 filesFromOption(self)
404 elif self._options.filetype ==
"DAT":
405 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
406 filesFromOption(self)
407 elif self._options.filetype ==
"LHE":
408 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
409 if self._options.filein.startswith(
"lhe:"):
411 args=self._options.filein.split(
':')
413 print(
'LHE input from article ',article)
414 location=
'/store/lhe/' 416 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
417 for line
in textOfFiles:
418 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
419 self.process.source.fileNames.append(location+article+
'/'+fileName)
422 print(
'Issue to load LHE files, please check and try again.')
425 if len(self.process.source.fileNames)==0:
426 print(
'Issue with empty filename, but can pass line check')
429 self.process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
431 filesFromOption(self)
433 elif self._options.filetype ==
"DQM":
434 self.process.source=cms.Source(
"DQMRootSource",
435 fileNames = cms.untracked.vstring())
436 filesFromOption(self)
438 elif self._options.filetype ==
"DQMDAQ":
440 self.process.source=cms.Source(
"DQMStreamerReader")
443 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
444 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
446 if self._options.dasquery!=
'':
447 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
448 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
450 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
451 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
454 if 'GEN' in self.stepMap.keys()
and not self._options.filetype ==
"LHE":
455 if self._options.inputCommands:
456 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 458 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 460 if self.process.source
and self._options.inputCommands
and not self._options.filetype ==
"LHE":
461 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
462 for command
in self._options.inputCommands.split(
','):
464 command = command.strip()
465 if command==
'':
continue 466 self.process.source.inputCommands.append(command)
467 if not self._options.dropDescendant:
468 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
470 if self._options.lumiToProcess:
471 import FWCore.PythonUtilities.LumiList
as LumiList
472 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
474 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.
stepMap or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
475 if self.process.source
is None:
476 self.process.source=cms.Source(
"EmptySource")
480 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
481 if not self._options.isMC :
482 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
483 if self._options.runsAndWeightsForMC:
486 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
487 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
488 __import__(RunsAndWeights[self._options.runsScenarioForMC])
489 self.
runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
491 self.
runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
494 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
496 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
497 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.
runsAndWeights))
502 """ Add output module to the process """ 504 if self._options.outputDefinition:
505 if self._options.datatier:
506 print(
"--datatier & --eventcontent options ignored")
509 outList = eval(self._options.outputDefinition)
510 for (id,outDefDict)
in enumerate(outList):
511 outDefDictStr=outDefDict.__str__()
512 if not isinstance(outDefDict,dict):
513 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
515 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
518 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
519 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
520 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
521 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
522 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
524 if not theModuleLabel:
525 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
526 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
527 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 529 for name
in tryNames:
530 if not hasattr(self.
process,name):
533 if not theModuleLabel:
534 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
536 defaultFileName=self._options.outfile_name
538 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
540 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
541 if not theFileName.endswith(
'.root'):
545 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
546 if theStreamType==
'DQMIO': theStreamType=
'DQM' 547 if theStreamType==
'ALL':
548 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
550 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
554 if theStreamType==
'ALCARECO' and not theFilterName:
555 theFilterName=
'StreamALCACombined' 558 CppType=
'PoolOutputModule' 559 if self._options.timeoutOutput:
560 CppType=
'TimeoutPoolOutputModule' 561 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 562 output = cms.OutputModule(CppType,
563 theEventContent.clone(),
564 fileName = cms.untracked.string(theFileName),
565 dataset = cms.untracked.PSet(
566 dataTier = cms.untracked.string(theTier),
567 filterName = cms.untracked.string(theFilterName))
569 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
570 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
571 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
572 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
574 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
577 if not hasattr(output,
'SelectEvents'):
578 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
580 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
583 if hasattr(self.
process,theModuleLabel):
584 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
586 setattr(self.
process,theModuleLabel,output)
587 outputModule=getattr(self.
process,theModuleLabel)
588 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
589 path=getattr(self.
process,theModuleLabel+
'_step')
590 self.schedule.append(path)
592 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
593 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): 595 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
596 if theExtraOutputCommands:
597 if not isinstance(theExtraOutputCommands,list):
598 raise Exception(
"extra ouput command in --option must be a list of strings")
599 if hasattr(self.
process,theStreamType+
"EventContent"):
600 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
602 outputModule.outputCommands.extend(theExtraOutputCommands)
604 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
        streamTypes=self._options.eventcontent.split(',')
        tiers=self._options.datatier.split(',')
        if not self._options.outputDefinition and len(streamTypes)!=len(tiers):
            raise Exception("number of event content arguments does not match number of datatier arguments")

        if self._options.step.split(',')[0].split(':')[0] == 'ALCA':

        for i,(streamType,tier) in enumerate(zip(streamTypes,tiers)):
            if streamType=='': continue
            if streamType == 'ALCARECO' and not 'ALCAPRODUCER' in self._options.step: continue
            if streamType=='DQMIO': streamType='DQM'
            eventContent=streamType
            if streamType == "NANOEDMAOD" :
                eventContent = "NANOAOD"
            elif streamType == "NANOEDMAODSIM" :
                eventContent = "NANOAODSIM"
            theEventContent = getattr(self.process, eventContent+"EventContent")

            theFileName=self._options.outfile_name
            theFilterName=self._options.filtername
            theFileName=self._options.outfile_name.replace('.root','_in'+streamType+'.root')
            theFilterName=self._options.filtername

            CppType='PoolOutputModule'
            if self._options.timeoutOutput:
                CppType='TimeoutPoolOutputModule'
            if streamType=='DQM' and tier=='DQMIO': CppType='DQMRootOutputModule'
            if "NANOAOD" in streamType : CppType='NanoAODOutputModule'
            output = cms.OutputModule(CppType,
                                      fileName = cms.untracked.string(theFileName),
                                      dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
                                                                   filterName = cms.untracked.string(theFilterName)

            if hasattr(self.process,"generation_step") and streamType!='LHE':
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('generation_step'))
            if hasattr(self.process,"filtering_step"):
                output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('filtering_step'))

            if streamType=='ALCARECO':
                output.dataset.filterName = cms.untracked.string('StreamALCACombined')

            if "MINIAOD" in streamType:
                from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput

            outputModuleName=streamType+'output'
            setattr(self.process,outputModuleName,output)
            outputModule=getattr(self.process,outputModuleName)
            setattr(self.process,outputModuleName+'_step',cms.EndPath(outputModule))
            path=getattr(self.process,outputModuleName+'_step')
            self.schedule.append(path)

            if self._options.outputCommands and streamType!='DQM':
                for evct in self._options.outputCommands.split(','):
                    if not evct: continue
                    self.executeAndRemember("process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))

            if not self._options.inlineEventContent:
                tmpstreamType=streamType
                if "NANOEDM" in tmpstreamType :
                    tmpstreamType=tmpstreamType.replace("NANOEDM","NANO")
                def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
                outputModule.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent

            result+="\nprocess."+outputModuleName+" = "+outputModule.dumpPython()
        """Add selected standard sequences to the process"""

        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a known mixing scenario:\n available are: '+'\n'.join(Mixing.keys())

            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            if 'file:' in pileupSpec:
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjets+=',mix'

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys():
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                        mixingDict['F']=self._options.pileup_input.split(',')
                for command in specialization:

            if len(mixingDict)!=0:
                raise Exception('unused mixing specification: '+mixingDict.keys().__str__())
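        # Illustrative sketch (not part of the original file): --pileup accepts either a key of
        # the Mixing dictionary (e.g. "NoPileUp"), a "file:..." or dotted cff specification that
        # is loaded directly, optionally followed by a comma and a python dict literal whose
        # entries update the mixing parameters before the leftover-key check above.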
        if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:

            print("Geometry option",self._options.geometry,"unknown.")

            stepSpec = self.stepMap[stepName]
            print("Step:", stepName, "Spec:",stepSpec)
            if stepName.startswith('re'):
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):

            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
809 """Add conditions to the process""" 810 if not self._options.conditions:
return 812 if 'FrontierConditions_GlobalTag' in self._options.conditions:
813 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
814 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
818 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
819 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
820 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
824 """Include the customise code """ 828 for c
in self._options.customisation_file:
829 custOpt.extend(c.split(
","))
831 for c
in self._options.customisation_file_unsch:
832 custOpt.extend(c.split(
","))
838 raise Exception(
"more than . in the specification:"+opt)
839 fileName=opt.split(
'.')[0]
840 if opt.count(
'.')==0: rest=
'customise' 842 rest=opt.split(
'.')[1]
843 if rest==
'py': rest=
'customise' 845 if fileName
in custMap:
846 custMap[fileName].extend(rest.split(
'+'))
848 custMap[fileName]=rest.split(
'+')
853 final_snippet=
'\n# customisation of the process.\n' 857 allFcn.extend(custMap[opt])
859 if allFcn.count(fcn)!=1:
860 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
864 packageName = f.replace(
".py",
"").
replace(
"/",
".")
865 __import__(packageName)
866 package = sys.modules[packageName]
869 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
871 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 872 if self._options.inline_custom:
873 for line
in file(customiseFile,
'r'): 874 if "import FWCore.ParameterSet.Config" in line:
876 final_snippet += line
878 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
879 for fcn
in custMap[f]:
880 print(
"customising the process with",fcn,
"from",f)
881 if not hasattr(package,fcn):
883 raise Exception(
"config "+f+
" has no function "+fcn)
887 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
888 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
891 final_snippet +=
'\n# End of customisation functions\n' 897 final_snippet=
'\n# Customisation from command line\n' 898 if self._options.customise_commands:
900 for com
in self._options.customise_commands.split(
'\\n'):
901 com=string.lstrip(com)
903 final_snippet +=
'\n'+com
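        # Illustrative sketch (not part of the original file): each --customise entry is of the
        # form "Package/SubPackage/module.function" (a missing ".function" defaults to
        # "customise"); the module is imported and every listed function is applied as
        # process = function(process), with the import and call also written into the dumped
        # configuration snippet built above.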
        if self._options.particleTable not in defaultOptions.particleTableList:
            print('Invalid particle table provided. Options are:')
            print(defaultOptions.particleTable)
        self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
            self._options.beamspot = None

        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            if self._options.isMC==True:

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            if self._options.isMC==True:

        if self._options.isData:
            if self._options.magField==defaultOptions.magField:
                print("magnetic field option forced to: AutoFromDBCurrent")
                self._options.magField='AutoFromDBCurrent'
        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'

        def inGeometryKeys(opt):
            from Configuration.StandardSequences.GeometryConf import GeometryConf
            if opt in GeometryConf:
                return GeometryConf[opt]

        geoms=self._options.geometry.split(',')
        if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
        if '/' in geoms[1] or '_cff' in geoms[1]:
            self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

        if (geoms[0].startswith('DB:')):

        if '/' in geoms[0] or '_cff' in geoms[0]:

        simGeometry=geoms[0]
        if self._options.gflash==True:
            self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
            self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        if self._options.isData:
            self._options.pileup=None

        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    output.SelectEvents.SelectEvents.append(path.label())
                output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
                self.schedule.append(stream.paths)

        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

        if ( len(sequence.split('.'))==1 ):
        elif ( len(sequence.split('.'))==2 ):
            sequence=sequence.split('.')[1]
            print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
            print(sequence,"not recognized")

        for i,s in enumerate(seq.split('*')):
            setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
            p=getattr(self.process,prefix)
            tmp = getattr(self.process, s)
            if isinstance(tmp, cms.Task):
        self.schedule.append(getattr(self.process,prefix))

        self.conditionalPaths.append(prefix)
        setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
        self.schedule.append(getattr(self.process,prefix))

        for i,s in enumerate(seq.split('+')):
            setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
            self.schedule.append(getattr(self.process,sn))
1248 """ Enrich the process with alca streams """ 1250 sequence = sequence.split(
'.')[-1]
1253 alcaList = sequence.split(
"+")
1255 from Configuration.AlCa.autoAlca
import autoAlca
1259 for name
in alcaConfig.__dict__:
1260 alcastream = getattr(alcaConfig,name)
1261 shortName = name.replace(
'ALCARECOStream',
'')
1262 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1263 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1264 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1265 self.AlCaPaths.append(shortName)
1266 if 'DQM' in alcaList:
1267 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1268 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1270 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1273 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1274 if isinstance(alcastream.paths,tuple):
1275 for path
in alcastream.paths:
1280 for i
in range(alcaList.count(shortName)):
1281 alcaList.remove(shortName)
1284 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1285 path = getattr(alcaConfig,name)
1286 self.schedule.append(path)
1287 alcaList.remove(
'DQM')
1289 if isinstance(alcastream,cms.Path):
1291 self.blacklist_paths.append(alcastream)
1294 if len(alcaList) != 0:
1296 for name
in alcaConfig.__dict__:
1297 alcastream = getattr(alcaConfig,name)
1298 if isinstance(alcastream,cms.FilteredStream):
1299 available.append(name.replace(
'ALCARECOStream',
''))
1300 print(
"The following alcas could not be found "+
str(alcaList))
1301 print(
"available ",available)
1303 raise Exception(
"The following alcas could not be found "+
str(alcaList))
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print("Loading lhe fragment from",loadFragment)
        __import__(loadFragment)
        self.process.load(loadFragment)

        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1323 """ load the fragment of generator configuration """ 1328 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1330 if not '/' in loadFragment:
1331 loadFragment=
'Configuration.Generator.'+loadFragment
1333 loadFragment=loadFragment.replace(
'/',
'.')
1335 print(
"Loading generator fragment from",loadFragment)
1336 __import__(loadFragment)
1340 if not (self._options.filein
or self._options.dasquery):
1341 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1344 generatorModule=sys.modules[loadFragment]
1345 genModules=generatorModule.__dict__
1351 if self._options.hideGen:
1354 self.process.load(loadFragment)
1356 import FWCore.ParameterSet.Modules
as cmstypes
1357 for name
in genModules:
1358 theObject = getattr(generatorModule,name)
1359 if isinstance(theObject, cmstypes._Module):
1360 self._options.inlineObjets=name+
','+self._options.inlineObjets
1361 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1362 self._options.inlineObjets+=
','+name
1364 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1365 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1367 elif 'generator' in genModules:
1370 """ Enrich the schedule with the rest of the generation step """ 1372 genSeqName=sequence.split(
'.')[-1]
1376 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1377 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1380 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1382 if self._options.scenario ==
'HeavyIons':
1383 if self._options.pileup==
'HiMixGEN':
1384 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1386 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1388 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1389 self.schedule.append(self.process.generation_step)
1392 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1398 """ Enrich the schedule with the summary of the filter step """ 1405 """ Enrich the schedule with the simulation step""" 1407 if not self._options.fast:
1408 if self._options.gflash==
True:
1411 if self._options.magField==
'0T':
1414 if self._options.magField==
'0T':
1415 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1421 """ Enrich the schedule with the digitisation step""" 1424 if self._options.gflash==
True:
1425 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1427 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1428 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1430 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource' and not self._options.filetype ==
"LHE":
1431 if self._options.inputEventContent==
'':
1432 self._options.inputEventContent=
'REGEN' 1434 self._options.inputEventContent=self._options.inputEventContent+
',REGEN' 1441 """ Enrich the schedule with the crossing frame writer step""" 1447 """ Enrich the schedule with the digitisation step""" 1451 if self._options.pileup_input:
1453 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1454 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1455 elif self._options.pileup_input.startswith(
"filelist:"):
1456 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1458 theFiles=self._options.pileup_input.split(
',')
1460 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1475 """ Enrich the schedule with the L1 simulation step""" 1476 assert(sequence ==
None)
1482 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1483 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1484 if sequence
in supported:
1485 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1486 if self._options.scenario ==
'HeavyIons':
1490 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1495 """ Enrich the schedule with the HLT simulation step""" 1497 print(
"no specification of the hlt menu has been given, should never happen")
1498 raise Exception(
'no HLT sequence provided')
1502 from Configuration.HLT.autoHLT
import autoHLT
1505 sequence = autoHLT[key]
1507 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1513 if self._options.scenario ==
'HeavyIons':
1514 optionsForHLT[
'type'] =
'HIon' 1516 optionsForHLT[
'type'] =
'GRun' 1517 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in six.iteritems(optionsForHLT))
1518 if sequence ==
'run,fromSource':
1519 if hasattr(self.process.source,
'firstRun'):
1520 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1521 elif hasattr(self.process.source,
'setRunNumber'):
1522 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1524 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1526 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1530 if self._options.isMC:
1531 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1533 if self._options.name !=
'HLT':
1534 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1535 self.additionalCommands.append(
'process = ProcessName(process)')
1536 self.additionalCommands.append(
'')
1537 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1540 self.schedule.append(self.process.HLTSchedule)
1541 [self.blacklist_paths.append(path)
for path
in self.process.HLTSchedule
if isinstance(path,(cms.Path,cms.EndPath))]
1544 if self._options.fast:
1545 if not hasattr(self.
process,
'HLTEndSequence'):
1546 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
        seqReco=sequence.split(',')[1]
        seqDigi=sequence.split(',')[0]
        print("RAW2RECO requires two specifications",sequence,"insufficient")

        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
        ''' Enrich the schedule with L1 HW validation '''
        print('\n\n\n DEPRECATED this has no action \n\n\n')

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with L1 reconstruction '''

        ''' Enrich the schedule with a user defined filter sequence '''
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]

        class PrintAllModules(object):
            def enter(self,visitee):
                label=visitee.label()
            def leave(self,v): pass

        expander=PrintAllModules()
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq
        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with reconstruction '''

        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')

        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniAOD production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
            self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with PATGEN '''
        self.labelsToAssociate.append('patGENTask')
        if not self._options.runUnscheduled:
            raise Exception("MiniGEN production can only run in unscheduled mode, please run cmsDriver with --runUnscheduled")
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")

        ''' Enrich the schedule with NANO '''
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
            self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

        ''' Enrich the schedule with NANOGEN '''
        fromGen = any([x in self.stepMap for x in ['LHE','GEN','AOD']])
        custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
        if self._options.runUnscheduled:
        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            sequence = 'EIsequence'
            raise Exception('Cannot set %s event interpretation'%( sequence) )

        ''' Enrich the schedule with skimming fragments'''
        sequence = sequence.split('.')[-1]
        skimlist=sequence.split('+')

        from Configuration.Skimming.autoSkim import autoSkim

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):

            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
            elif (shortname in skimlist):
                if self._options.datatier=='DQM':
                    skimstreamDQM = cms.FilteredStream(
                        responsible = skimstream.responsible,
                        name = skimstream.name+'DQM',
                        paths = skimstream.paths,
                        selectEvents = skimstream.selectEvents,
                        content = self._options.datatier+'EventContent',
                        dataTier = cms.untracked.string(self._options.datatier)
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
        ''' Enrich the schedule with a user defined sequence '''

        """ Enrich the schedule with the postreco step """

        print(sequence,"in preparing validation")

        from Validation.Configuration.autoValidation import autoValidation
        sequence=sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName=sequence.split(',')[0].split('+')
            valSeqName=sequence.split(',')[1].split('+')
            prevalSeqName=sequence.split('+')
            valSeqName=sequence.split('+')

        postfix='_'+sequence
        prevalSeqName=['prevalidation'+postfix]
        valSeqName=['validation'+postfix]
        if not hasattr(self.process,valSeqName[0]):
            valSeqName=[sequence]

        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:

        for (i,s) in enumerate(prevalSeqName):
            setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
            self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds=True

        if not 'DIGI' in self.stepMap and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1848 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1849 It will climb down within PSets, VPSets and VInputTags to find its target""" 1850 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1857 if isinstance(pset, cms._Parameterizable):
1858 for name
in pset.parameters_().
keys():
1864 value = getattr(pset,name)
1865 type = value.pythonTypeName()
1866 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1867 self.
doIt(value,base+
"."+name)
1868 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1869 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1870 elif type
in (
'cms.string',
'cms.untracked.string'):
1874 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1875 for (i,n)
in enumerate(value):
1876 if not isinstance(n, cms.InputTag):
1883 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1884 for (i,n)
in enumerate(value):
1887 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1890 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1895 label = visitee.label()
1896 except AttributeError:
1897 label =
'<Module not in a Process>' 1899 label =
'other execption' 1900 self.
doIt(visitee, label)
        print("Replacing all InputTag %s => %s"%(oldT,newT))

        loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
        self.additionalCommands.append(loadMe)
        self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))

        if self._options.hltProcess:
            proc=self._options.hltProcess
            proc=self.process.name_()
        if proc==HLTprocess: return

        print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))

        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
        while '@' in repr(seqList) and level<maxLevel:
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location=specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo=mapping[location]
                    mappedTo=mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
        raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
        sequenceList=sequence.split('.')[-1].split('+')
        postSequenceList=sequence.split('.')[-1].split('+')
        from DQMOffline.Configuration.autoDQM import autoDQM

        if len(set(sequenceList))!=len(sequenceList):
            sequenceList=list(set(sequenceList))
            print("Duplicate entries for DQM:, using",sequenceList)

        pathName='dqmoffline_step'
        for (i,sequence) in enumerate(sequenceList):
            pathName='dqmoffline_%d_step'%(i)

            if 'HLT' in self.stepMap.keys() or self._options.hltProcess:

            setattr(self.process,pathName, cms.EndPath( getattr(self.process,sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))

            if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
                getattr(self.process,pathName).insert(0,self.process.genstepfilter)

        pathName='dqmofflineOnPAT_step'
        for (i,sequence) in enumerate(postSequenceList):
            pathName='dqmofflineOnPAT_%d_step'%(i)

            setattr(self.process,pathName, cms.EndPath( getattr(self.process, sequence ) ) )
            self.schedule.append(getattr(self.process,pathName))
1989 """ Enrich the process with harvesting step """ 1990 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff' 1994 sequence = sequence.split(
'.')[-1]
1997 harvestingList = sequence.split(
"+")
1998 from DQMOffline.Configuration.autoDQM
import autoDQM
1999 from Validation.Configuration.autoValidation
import autoValidation
2001 combined_mapping = copy.deepcopy( autoDQM )
2002 combined_mapping.update( autoValidation )
2003 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2005 if len(set(harvestingList))!=len(harvestingList):
2006 harvestingList=
list(set(harvestingList))
2007 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2009 for name
in harvestingList:
2010 if not name
in harvestingConfig.__dict__:
2011 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2013 harvestingstream = getattr(harvestingConfig,name)
2014 if isinstance(harvestingstream,cms.Path):
2015 self.schedule.append(harvestingstream)
2016 self.blacklist_paths.append(harvestingstream)
2017 if isinstance(harvestingstream,cms.Sequence):
2018 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2019 self.schedule.append(getattr(self.
process,name+
"_step"))
2025 """ Enrich the process with AlCaHarvesting step """ 2027 sequence=sequence.split(
".")[-1]
2030 harvestingList = sequence.split(
"+")
2034 from Configuration.AlCa.autoPCL
import autoPCL
2037 for name
in harvestingConfig.__dict__:
2038 harvestingstream = getattr(harvestingConfig,name)
2039 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2040 self.schedule.append(harvestingstream)
2041 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2042 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2043 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2044 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2046 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2047 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2048 harvestingList.remove(name)
2050 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2051 self.schedule.append(lastStep)
2053 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2054 print(
"The following harvesting could not be found : ", harvestingList)
2055 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
    def finalizeFastSimHLT(self):
        self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
        self.schedule.append(self.process.reconstruction)
2070 """ Add useful info for the production. """ 2071 self.process.configurationMetadata=cms.untracked.PSet\
2072 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2073 name=cms.untracked.string(
"Applications"),
2074 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2077 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
        # (ConfigBuilder, building the generated-config header and the cms.Process
        #  with any requested era and process modifiers)
        self.pythonCfgCode += "# using: \n# "+__version__[1:-1]+"\n# "+__source__[1:-1]+'\n'
        self.pythonCfgCode += "# with command line options: "+self._options.arguments+'\n'
        self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
        ...
        if hasattr(self._options,"era") and self._options.era:
            ...
            from Configuration.StandardSequences.Eras import eras
            for requestedEra in self._options.era.split(","):
                modifierStrings.append(requestedEra)
                modifierImports.append(eras.pythonCfgLines[requestedEra])
                modifiers.append(getattr(eras,requestedEra))
        ...
        if hasattr(self._options,"procModifiers") and self._options.procModifiers:
            ...
            for pm in self._options.procModifiers.split(','):
                modifierStrings.append(pm)
                modifierImports.append('from Configuration.ProcessModifiers.'+pm+'_cff import '+pm)
                modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.'+pm+'_cff'),pm))
        ...
        self.pythonCfgCode += "process = cms.Process('"+self._options.name+"'"
        if len(modifierStrings)>0:
            ...
        if len(modifiers)>0:
            self.process = cms.Process(self._options.name,*modifiers)
        else:
            self.process = cms.Process(self._options.name)
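# Illustrative aside (not part of ConfigBuilder, requires a CMSSW environment):
# what the code above effectively does when an era such as Run2_2018 is
# requested on the command line. The era name is only an example.
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras

modifiers = [getattr(eras, 'Run2_2018')]   # one entry per requested era / process modifier
if len(modifiers) > 0:
    process = cms.Process('RECO', *modifiers)
else:
    process = cms.Process('RECO')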
2128 """ Prepare the configuration string and add missing pieces.""" 2140 outputModuleCfgCode=
"" 2141 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2146 self.
pythonCfgCode +=
"# import of standard configurations\n" 2151 if not hasattr(self.
process,
"configurationMetadata"):
2155 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
2170 nl=sorted(self.additionalOutputs.keys())
2173 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2174 tmpOut = cms.EndPath(output)
2175 setattr(self.
process,name+
'OutPath',tmpOut)
2176 self.schedule.append(tmpOut)
        ...
        for object in self._options.inlineObjets.split(','):
            ...
            if not hasattr(self.process,object):
                print('cannot inline -'+object+'- : not known')
                ...
        ...
        for path in self.process.paths:
            ...
        for endpath in self.process.endpaths:
            ...

        # prepare the schedule
        result = "process.schedule = cms.Schedule("
        ...
        self.process.schedule = cms.Schedule()
        for item in self.schedule:
            if not isinstance(item, cms.Schedule):
                self.process.schedule.append(item)
            else:
                self.process.schedule.extend(item)

        if hasattr(self.process,"HLTSchedule"):
            beforeHLT = self.schedule[:self.schedule.index(self.process.HLTSchedule)]
            afterHLT = self.schedule[self.schedule.index(self.process.HLTSchedule)+1:]
            pathNames = ['process.'+p.label_() for p in beforeHLT]
            result += ','.join(pathNames)+')\n'
            result += 'process.schedule.extend(process.HLTSchedule)\n'
            pathNames = ['process.'+p.label_() for p in afterHLT]
            result += 'process.schedule.extend(['+','.join(pathNames)+'])\n'
        else:
            pathNames = ['process.'+p.label_() for p in self.schedule]
            result = 'process.schedule = cms.Schedule('+','.join(pathNames)+')\n'
        ...
        # (inside a loop over task labels that need to be associated with the schedule)
        self.process.schedule.associate(getattr(self.process, labelToAssociate))
        self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
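# Illustrative aside (not part of ConfigBuilder): a plain-Python sketch of the
# schedule line assembled above for a process without an HLTSchedule; the path
# labels are invented.
pathNames = ['process.' + p for p in ('raw2digi_step', 'reconstruction_step', 'endjob_step')]
result = 'process.schedule = cms.Schedule(' + ','.join(pathNames) + ')\n'
print(result)
# process.schedule = cms.Schedule(process.raw2digi_step,process.reconstruction_step,process.endjob_step)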
        self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
        ...
        if self._options.nThreads != "1":
            ...
            self.pythonCfgCode += "process.options.numberOfThreads=cms.untracked.uint32("+self._options.nThreads+")\n"
            self.pythonCfgCode += "process.options.numberOfStreams=cms.untracked.uint32("+self._options.nStreams+")\n"
            self.pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32("+self._options.nConcurrentLumis+")\n"
            self.process.options.numberOfThreads=cms.untracked.uint32(int(self._options.nThreads))
            self.process.options.numberOfStreams=cms.untracked.uint32(int(self._options.nStreams))
            self.process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(int(self._options.nConcurrentLumis))
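# Illustrative aside (not part of ConfigBuilder): the multi-threading lines the
# block above writes into the generated config, assembled here in plain Python
# for example option values "--nThreads 4 --nStreams 0 --nConcurrentLumis 1".
nThreads, nStreams, nConcurrentLumis = '4', '0', '1'
cfg  = "process.options.numberOfThreads=cms.untracked.uint32(" + nThreads + ")\n"
cfg += "process.options.numberOfStreams=cms.untracked.uint32(" + nStreams + ")\n"
cfg += "process.options.numberOfConcurrentLuminosityBlocks=cms.untracked.uint32(" + nConcurrentLumis + ")\n"
print(cfg)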
        if self._options.isRepacked:
            ...
            self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
            self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
            MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
        ...
        self.pythonCfgCode += '# filter all path with the production filter sequence\n'
        ...
        for path in self.process.paths:
            ...
        if self._options.runUnscheduled:
            ...
            self.pythonCfgCode += "#do not add changes to your config after this point (unless you know what you are doing)\n"
            self.pythonCfgCode += "from FWCore.ParameterSet.Utilities import convertToUnscheduled\n"
            self.pythonCfgCode += "process=convertToUnscheduled(process)\n"
            ...
            from FWCore.ParameterSet.Utilities import convertToUnscheduled
            ...
        if hasattr(self.process,"logErrorHarvester"):
            ...
            self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
            self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
            self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
            ...
            from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
            ...
        self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
        self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
        self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
        ...
        from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
        ...
        imports = cms.specialImportRegistry.getSpecialImports()
        if len(imports) > 0:
            ...
            index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
            ...
            index = self.pythonCfgCode.find("\n",index)
            ...
        if self._options.io:
            ...
            if not self._options.io.endswith('.io'): self._options.io+='.io'
            io=open(self._options.io,'w')
            ioJson={}
            if hasattr(self.process.source,"fileNames"):
                if len(self.process.source.fileNames.value()):
                    ioJson['primary']=self.process.source.fileNames.value()
            if hasattr(self.process.source,"secondaryFileNames"):
                if len(self.process.source.secondaryFileNames.value()):
                    ioJson['secondary']=self.process.source.secondaryFileNames.value()
            if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
                ioJson['pileup']=self._options.pileup_input[4:]
            for (o,om) in self.process.outputModules_().items():
                ioJson[o]=om.fileName.value()
            ioJson['GT']=self.process.GlobalTag.globaltag.value()
            ...
            io.write(json.dumps(ioJson))
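# Illustrative aside (not part of ConfigBuilder): the kind of JSON the --io
# option writes out in the block above. All values below are invented; the real
# keys come from the source file names, the pileup dataset, one entry per output
# module, and the GlobalTag.
import json

ioJson = {
    'primary': ['file:step2.root'],
    'pileup': '/RelValMinBias/ExampleSample/GEN-SIM',
    'RECOoutput': 'step3.root',
    'GT': 'auto:phase1_2021_realistic',
}
print(json.dumps(ioJson))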
def load(self, includeFile)
def prepare_ENDJOB(self, sequence='endOfProcess')
def prepare_LHE(self, sequence=None)
def prepare_DATAMIX(self, sequence=None)
def expandMapping(self, seqList, mapping, index=None)
def prepare_SIM(self, sequence=None)
def prepare_HARVESTING(self, sequence=None)
def prepare_USER(self, sequence=None)
def prepare_GEN(self, sequence=None)
def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def prepare_L1REPACK(self, sequence=None)
def finalizeFastSimHLT(self)
inliner: load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
def prepare_POSTRECO(self, sequence=None)
process: adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def prepare_REPACK(self, sequence=None)
def customiseEarlyDelete(process)
def prepare_EI(self, sequence=None)
def prepare_RECOBEFMIX(self, sequence="reconstruction")
def prepare_FILTER(self, sequence=None)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, sequence=None)
def build_production_info(self, evt_type, evtnumber)
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def visit(visitdir)
Retrieve data from a perf suite output (sub) directory, only examines TimeSize at the moment...
def prepare_L1HwVal(self, sequence='L1HwVal')
def prepare_CFWRITER(self, sequence=None)
def prepare_RAW2DIGI(self, sequence="RawToDigi")
def prepare_RECO(self, sequence="reconstruction")
def anyOf(listOfKeys, dict, opt=None)
def convertToUnscheduled(proc)
def prepare_PATGEN(self, sequence="miniGEN")
def addExtraStream(self, name, stream, workflow='full')
def prepare_DQM(self, sequence='DQMOffline')
def prepare_L1Reco(self, sequence="L1Reco")
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT')
def prepare_RECOSIM(self, sequence="recosim")
productionFilterSequence: put it before all the other paths
runsAndWeights: drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_ALCAPRODUCER(self, sequence=None)
def prepare_ALCAOUTPUT(self, sequence=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
def prepare_DIGI(self, sequence=None)
def prepare_DIGI2RAW(self, sequence=None)
def loadAndRemember(self, includeFile)
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_NANO(self, sequence="nanoAOD")
def prepare_SKIM(self, sequence="all")
def prepare_ALCAHARVEST(self, sequence=None)
def dumpPython(process, name)
def prepare_L1TrackTrigger(self, sequence="L1TrackTrigger")
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_ALCA(self, sequence=None, workflow='full')
def prepare_HLT(self, sequence=None)
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_VALIDATION(self, sequence='validation')
def prepare_NANOGEN(self, sequence="nanoAOD")
def prepare_L1(self, sequence=None)
def scheduleSequenceAtEnd(self, seq, prefix)
def filesFromList(fileName, s=None)
def prepare_RAW2RECO(self, sequence=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def completeInputCommand(self)
def executeAndRemember(self, command)
def addStandardSequences(self)
def prepare_PAT(self, sequence="miniAOD")
nextScheduleIsConditional: put the filtering path in the schedule