from __future__ import print_function

# CVS keyword strings, kept for provenance of this configuration module.
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
from FWCore.ParameterSet.OrderedSet import OrderedSet
# Default values for every cmsDriver option understood by ConfigBuilder.
# `defaultOptions` itself is created earlier in this file (not visible in
# this chunk); each attribute below documents one command-line switch.

# --- sample type and workflow steps -------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC = False
defaultOptions.isData = True
defaultOptions.step = ''

# --- pileup mixing ------------------------------------------------------
defaultOptions.pileup = 'NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# --- geometry / magnetic field / conditions -----------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash', 'Extended', 'NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions = ['pp', 'cosmics', 'nocoll', 'HeavyIons']
defaultOptions.harvesting = 'AtRunEnd'
defaultOptions.gflash = False

# --- event counts and job identification --------------------------------
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""

# --- input sources ------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery = ""
defaultOptions.dasoption = ""
defaultOptions.secondfilein = ""

# --- user customisation hooks -------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom = False

# --- particle table -----------------------------------------------------
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt', 'pdt']

# --- input/output locations and types -----------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''

# --- event content / data tier ------------------------------------------
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen = False

# Imported here (mirroring the original layout) so the beamspot keys are
# available where --beamspot defaults are resolved later in the file.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot = None

# --- output definition and event selection commands ---------------------
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None

# --- profiling and replay -----------------------------------------------
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io = None
defaultOptions.lumiToProcess = None
defaultOptions.fast = False

# --- run/weight assignment for MC ---------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False

# --- framework concurrency settings (strings: parsed downstream) --------
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None
None 98 theObject = getattr(process,name)
99 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
100 return "process."+name+
" = " + theObject.dumpPython()
101 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
102 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 104 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 107 import FWCore.ParameterSet.Config
as cms
110 for line
in open(fileName,
'r'): 111 if line.count(
".root")>=2:
113 entries=line.replace(
"\n",
"").
split()
114 prim.append(entries[0])
115 sec.append(entries[1])
116 elif (line.find(
".root")!=-1):
117 entry=line.replace(
"\n",
"")
121 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
123 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
125 if not hasattr(s,
"fileNames"):
126 s.fileNames=cms.untracked.vstring(prim)
128 s.fileNames.extend(prim)
130 if not hasattr(s,
"secondaryFileNames"):
131 s.secondaryFileNames=cms.untracked.vstring(sec)
133 s.secondaryFileNames.extend(sec)
134 print(
"found files: ",prim)
136 raise Exception(
"There are not files in input from the file list")
138 print(
"found parent files:",sec)
143 import FWCore.ParameterSet.Config
as cms
146 print(
"the query is",query)
149 while eC!=0
and count<3:
151 print(
'Sleeping, then retrying DAS')
153 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
155 tupleP = os.waitpid(p.pid, 0)
159 print(
"DAS succeeded after",count,
"attempts",eC)
161 print(
"DAS failed 3 times- I give up")
162 for line
in pipe.split(
'\n'):
163 if line.count(
".root")>=2:
165 entries=line.replace(
"\n",
"").
split()
166 prim.append(entries[0])
167 sec.append(entries[1])
168 elif (line.find(
".root")!=-1):
169 entry=line.replace(
"\n",
"")
172 prim = sorted(list(set(prim)))
173 sec = sorted(list(set(sec)))
175 if not hasattr(s,
"fileNames"):
176 s.fileNames=cms.untracked.vstring(prim)
178 s.fileNames.extend(prim)
180 if not hasattr(s,
"secondaryFileNames"):
181 s.secondaryFileNames=cms.untracked.vstring(sec)
183 s.secondaryFileNames.extend(sec)
184 print(
"found files: ",prim)
186 print(
"found parent files:",sec)
189 def anyOf(listOfKeys,dict,opt=None):
198 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
201 """The main building routines """ 203 def __init__(self, options, process = None, with_output = False, with_input = False ):
204 """options taken from old cmsDriver and optparse """ 206 options.outfile_name = options.dirout+options.fileout
210 if self.
_options.isData
and options.isMC:
211 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
217 if (hasattr(self.
_options,
"outputDefinition")
and \
218 self.
_options.outputDefinition !=
'' and \
219 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
220 (hasattr(self.
_options,
"datatier")
and \
223 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
229 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
232 for step
in self.
_options.step.split(
","):
233 if step==
'':
continue 234 stepParts = step.split(
":")
235 stepName = stepParts[0]
236 if stepName
not in stepList
and not stepName.startswith(
're'):
237 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
238 if len(stepParts)==1:
240 elif len(stepParts)==2:
242 elif len(stepParts)==3:
243 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
245 raise ValueError(f
"Step definition {step} invalid")
253 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
281 Function to add the igprof profile service so that you can dump in the middle 284 profileOpts = self.
_options.profile.split(
':')
286 profilerInterval = 100
287 profilerFormat =
None 288 profilerJobFormat =
None 294 startEvent = profileOpts.pop(0)
295 if not startEvent.isdigit():
296 raise Exception(
"%s is not a number" % startEvent)
297 profilerStart =
int(startEvent)
299 eventInterval = profileOpts.pop(0)
300 if not eventInterval.isdigit():
301 raise Exception(
"%s is not a number" % eventInterval)
302 profilerInterval =
int(eventInterval)
304 profilerFormat = profileOpts.pop(0)
307 if not profilerFormat:
308 profilerFormat =
"%s___%s___%%I.gz" % (
309 self.
_options.evt_type.replace(
"_cfi",
""),
315 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
316 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
317 elif not profilerJobFormat:
318 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 320 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
325 Function to add the jemalloc heap profile service so that you can dump in the middle 328 profileOpts = self.
_options.profile.split(
':')
330 profilerInterval = 100
331 profilerFormat =
None 332 profilerJobFormat =
None 338 startEvent = profileOpts.pop(0)
339 if not startEvent.isdigit():
340 raise Exception(
"%s is not a number" % startEvent)
341 profilerStart =
int(startEvent)
343 eventInterval = profileOpts.pop(0)
344 if not eventInterval.isdigit():
345 raise Exception(
"%s is not a number" % eventInterval)
346 profilerInterval =
int(eventInterval)
348 profilerFormat = profileOpts.pop(0)
351 if not profilerFormat:
352 profilerFormat =
"%s___%s___%%I.heap" % (
353 self.
_options.evt_type.replace(
"_cfi",
""),
359 if not profilerJobFormat
and profilerFormat.endswith(
".heap"):
360 profilerJobFormat = profilerFormat.replace(
".heap",
"_EndOfJob.heap")
361 elif not profilerJobFormat:
362 profilerJobFormat = profilerFormat +
"_EndOfJob.heap" 364 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
367 includeFile = includeFile.replace(
'/',
'.')
369 return sys.modules[includeFile]
372 """helper routine to load am memorize imports""" 375 includeFile = includeFile.replace(
'/',
'.')
378 return sys.modules[includeFile]
381 """helper routine to remember replace statements""" 383 if not command.strip().startswith(
"#"):
386 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
391 self.
process.options.Rethrow = [
'ProductNotFound']
392 self.
process.options.fileMode =
'FULLMERGE' 397 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
398 stats = cms.untracked.bool(
True),
399 enable = cms.untracked.bool(
True),
400 cacheHint = cms.untracked.string(
"lazy-download"),
401 readHint = cms.untracked.string(
"read-ahead-buffered")
410 self.
process.IgProfService = cms.Service(
"IgProfService",
411 reportFirstEvent = cms.untracked.int32(start),
412 reportEventInterval = cms.untracked.int32(interval),
413 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
414 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
419 self.
process.JeProfService = cms.Service(
"JeProfService",
420 reportFirstEvent = cms.untracked.int32(start),
421 reportEventInterval = cms.untracked.int32(interval),
422 reportToFileAtPostEvent = cms.untracked.string(
"%s"%(eventFormat)),
423 reportToFileAtPostEndJob = cms.untracked.string(
"%s"%(jobFormat)))
427 """Here we decide how many evts will be processed""" 434 """Here the source is built. Priority: file, generator""" 437 def filesFromOption(self):
438 for entry
in self.
_options.filein.split(
','):
440 if entry.startswith(
"filelist:"):
442 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
447 if not hasattr(self.
process.source,
"secondaryFileNames"):
448 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
449 for entry
in self.
_options.secondfilein.split(
','):
451 if entry.startswith(
"filelist:"):
453 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
456 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
460 self.
process.source=cms.Source(
"PoolSource",
461 fileNames = cms.untracked.vstring(),
462 secondaryFileNames= cms.untracked.vstring())
463 filesFromOption(self)
464 elif self.
_options.filetype ==
"DAT":
465 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
466 filesFromOption(self)
467 elif self.
_options.filetype ==
"LHE":
468 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
469 if self.
_options.filein.startswith(
"lhe:"):
471 args=self.
_options.filein.split(
':')
473 print(
'LHE input from article ',article)
474 location=
'/store/lhe/' 476 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
477 for line
in textOfFiles:
478 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
479 self.
process.source.fileNames.append(location+article+
'/'+fileName)
482 print(
'Issue to load LHE files, please check and try again.')
485 if len(self.
process.source.fileNames)==0:
486 print(
'Issue with empty filename, but can pass line check')
489 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
491 filesFromOption(self)
493 elif self.
_options.filetype ==
"DQM":
494 self.
process.source=cms.Source(
"DQMRootSource",
495 fileNames = cms.untracked.vstring())
496 filesFromOption(self)
498 elif self.
_options.filetype ==
"DQMDAQ":
500 self.
process.source=cms.Source(
"DQMStreamerReader")
504 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
507 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
511 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
516 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 518 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 521 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
522 for command
in self.
_options.inputCommands.split(
','):
524 command = command.strip()
525 if command==
'':
continue 526 self.
process.source.inputCommands.append(command)
527 if not self.
_options.dropDescendant:
528 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
531 import FWCore.PythonUtilities.LumiList
as LumiList
535 if self.
process.source
is None:
536 self.
process.source=cms.Source(
"EmptySource")
542 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
543 if self.
_options.runsAndWeightsForMC:
546 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
547 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
548 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
549 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
554 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
561 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
563 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
564 if self.
_options.runsAndWeightsForMCIntegerWeights:
567 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
568 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
569 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
570 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
576 raise Exception(
"--relval option required when using --runsAndWeightsInt")
578 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
579 total_events, events_per_job = self.
_options.relval.split(
',')
581 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
586 """ Add output module to the process """ 590 print(
"--datatier & --eventcontent options ignored")
593 outList = eval(self.
_options.outputDefinition)
594 for (id,outDefDict)
in enumerate(outList):
595 outDefDictStr=outDefDict.__str__()
596 if not isinstance(outDefDict,dict):
597 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
599 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
602 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
603 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
604 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
605 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
606 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
608 if not theModuleLabel:
609 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
610 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
611 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 613 for name
in tryNames:
614 if not hasattr(self.
process,name):
617 if not theModuleLabel:
618 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
620 defaultFileName=self.
_options.outfile_name
622 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
624 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
625 if not theFileName.endswith(
'.root'):
629 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
630 if theStreamType==
'DQMIO': theStreamType=
'DQM' 631 if theStreamType==
'ALL':
632 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
634 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
638 if theStreamType==
'ALCARECO' and not theFilterName:
639 theFilterName=
'StreamALCACombined' 642 CppType=
'PoolOutputModule' 644 CppType=
'TimeoutPoolOutputModule' 645 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 646 output = cms.OutputModule(CppType,
647 theEventContent.clone(),
648 fileName = cms.untracked.string(theFileName),
649 dataset = cms.untracked.PSet(
650 dataTier = cms.untracked.string(theTier),
651 filterName = cms.untracked.string(theFilterName))
653 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
654 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
655 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
656 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
658 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
661 if not hasattr(output,
'SelectEvents'):
662 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
664 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
667 if hasattr(self.
process,theModuleLabel):
668 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
670 setattr(self.
process,theModuleLabel,output)
671 outputModule=getattr(self.
process,theModuleLabel)
672 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
673 path=getattr(self.
process,theModuleLabel+
'_step')
676 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
677 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
678 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
679 if theExtraOutputCommands:
680 if not isinstance(theExtraOutputCommands,list):
681 raise Exception(
"extra ouput command in --option must be a list of strings")
682 if hasattr(self.
process,theStreamType+
"EventContent"):
683 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
685 outputModule.outputCommands.extend(theExtraOutputCommands)
687 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
692 streamTypes=self.
_options.eventcontent.split(
',')
693 tiers=self.
_options.datatier.split(
',')
694 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
695 raise Exception(
"number of event content arguments does not match number of datatier arguments")
701 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
702 if streamType==
'':
continue 703 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 704 if streamType==
'DQMIO': streamType=
'DQM' 705 eventContent=streamType
707 if streamType ==
"NANOEDMAOD" :
708 eventContent =
"NANOAOD" 709 elif streamType ==
"NANOEDMAODSIM" :
710 eventContent =
"NANOAODSIM" 711 theEventContent = getattr(self.
process, eventContent+
"EventContent")
713 theFileName=self.
_options.outfile_name
714 theFilterName=self.
_options.filtername
716 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
717 theFilterName=self.
_options.filtername
718 CppType=
'PoolOutputModule' 720 CppType=
'TimeoutPoolOutputModule' 721 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 722 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 723 output = cms.OutputModule(CppType,
725 fileName = cms.untracked.string(theFileName),
726 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
727 filterName = cms.untracked.string(theFilterName)
730 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
731 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
732 if hasattr(self.
process,
"filtering_step"):
733 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
735 if streamType==
'ALCARECO':
736 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
738 if "MINIAOD" in streamType:
739 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
742 outputModuleName=streamType+
'output' 743 setattr(self.
process,outputModuleName,output)
744 outputModule=getattr(self.
process,outputModuleName)
745 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
746 path=getattr(self.
process,outputModuleName+
'_step')
749 if self.
_options.outputCommands
and streamType!=
'DQM':
750 for evct
in self.
_options.outputCommands.split(
','):
751 if not evct:
continue 752 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
754 if not self.
_options.inlineEventContent:
755 tmpstreamType=streamType
756 if "NANOEDM" in tmpstreamType :
757 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
758 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
760 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
762 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
768 Add selected standard sequences to the process 772 pileupSpec=self.
_options.pileup.split(
',')[0]
775 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
776 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
777 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
781 if '.' in pileupSpec:
782 mixingDict={
'file':pileupSpec}
783 elif pileupSpec.startswith(
'file:'):
784 mixingDict={
'file':pileupSpec[5:]}
787 mixingDict=copy.copy(Mixing[pileupSpec])
788 if len(self.
_options.pileup.split(
','))>1:
789 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
792 if 'file:' in pileupSpec:
795 print(
"inlining mixing module configuration")
800 mixingDict.pop(
'file')
803 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
805 elif self.
_options.pileup_input.startswith(
"filelist:"):
808 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
810 for command
in specialization:
812 if len(mixingDict)!=0:
813 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
834 stepSpec = self.
stepMap[stepName]
835 print(
"Step:", stepName,
"Spec:",stepSpec)
836 if stepName.startswith(
're'):
838 if stepName[2:]
not in self.
_options.donotDropOnInput:
839 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
840 stepName=stepName[2:]
842 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
843 elif isinstance(stepSpec, list):
844 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
845 elif isinstance(stepSpec, tuple):
846 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
848 raise ValueError(
"Invalid step definition")
850 if self.
_options.restoreRNDSeeds!=
False:
852 if self.
_options.restoreRNDSeeds==
True:
853 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
855 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
858 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 860 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 866 def dropSecondDropStar(iec):
876 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
877 for evct
in self.
_options.inputEventContent.split(
','):
878 if evct==
'':
continue 879 theEventContent = getattr(self.
process, evct+
"EventContent")
880 if hasattr(theEventContent,
'outputCommands'):
881 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
882 if hasattr(theEventContent,
'inputCommands'):
883 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
885 dropSecondDropStar(self.
process.source.inputCommands)
887 if not self.
_options.dropDescendant:
888 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
894 """Add conditions to the process""" 895 if not self.
_options.conditions:
return 897 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
898 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
899 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
902 from Configuration.AlCa.GlobalTag
import GlobalTag
909 """Include the customise code """ 913 for c
in self.
_options.customisation_file:
914 custOpt.extend(c.split(
","))
916 for c
in self.
_options.customisation_file_unsch:
917 custOpt.extend(c.split(
","))
923 raise Exception(
"more than . in the specification:"+opt)
924 fileName=opt.split(
'.')[0]
925 if opt.count(
'.')==0: rest=
'customise' 927 rest=opt.split(
'.')[1]
928 if rest==
'py': rest=
'customise' 930 if fileName
in custMap:
931 custMap[fileName].extend(rest.split(
'+'))
933 custMap[fileName]=rest.split(
'+')
938 final_snippet=
'\n# customisation of the process.\n' 942 allFcn.extend(custMap[opt])
944 if allFcn.count(fcn)!=1:
945 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
949 packageName = f.replace(
".py",
"").
replace(
"/",
".")
950 __import__(packageName)
951 package = sys.modules[packageName]
954 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
956 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 958 for line
in file(customiseFile,
'r'): 959 if "import FWCore.ParameterSet.Config" in line:
961 final_snippet += line
963 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
964 for fcn
in custMap[f]:
965 print(
"customising the process with",fcn,
"from",f)
966 if not hasattr(package,fcn):
968 raise Exception(
"config "+f+
" has no function "+fcn)
972 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
973 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
976 final_snippet +=
'\n# End of customisation functions\n' 982 final_snippet=
'\n# Customisation from command line\n' 983 if self.
_options.customise_commands:
985 for com
in self.
_options.customise_commands.split(
'\\n'):
988 final_snippet +=
'\n'+com
999 if self.
_options.particleTable
not in defaultOptions.particleTableList:
1000 print(
'Invalid particle table provided. Options are:')
1001 print(defaultOptions.particleTable)
1009 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 1042 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1080 self.
_options.beamspot=VtxSmearedDefaultKey
1085 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1086 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1089 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1098 if self.
_options.scenario==
'cosmics':
1100 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1101 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1102 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1112 if self.
_options.scenario==
'HeavyIons':
1114 self.
_options.beamspot=VtxSmearedHIDefaultKey
1119 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1121 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1124 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1136 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1140 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1144 if 'start' in self.
_options.conditions.lower():
1145 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1147 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1149 def inGeometryKeys(opt):
1150 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1151 if opt
in GeometryConf:
1152 return GeometryConf[opt]
1156 geoms=self.
_options.geometry.split(
',')
1157 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1160 if '/' in geoms[1]
or '_cff' in geoms[1]:
1163 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1165 if (geoms[0].startswith(
'DB:')):
1166 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1170 if '/' in geoms[0]
or '_cff' in geoms[0]:
1173 simGeometry=geoms[0]
1175 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1177 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1180 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1181 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1184 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1189 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1190 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1197 if self.
_options.pileup==
'default':
1198 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1199 self.
_options.pileup=MixingDefaultKey
1212 output = cms.OutputModule(
"PoolOutputModule")
1213 if stream.selectEvents.parameters_().__len__()!=0:
1214 output.SelectEvents = stream.selectEvents
1216 output.SelectEvents = cms.untracked.PSet()
1217 output.SelectEvents.SelectEvents=cms.vstring()
1218 if isinstance(stream.paths,tuple):
1219 for path
in stream.paths:
1220 output.SelectEvents.SelectEvents.append(path.label())
1222 output.SelectEvents.SelectEvents.append(stream.paths.label())
1226 if isinstance(stream.content,str):
1227 evtPset=getattr(self.process,stream.content)
1228 for p
in evtPset.parameters_():
1229 setattr(output,p,getattr(evtPset,p))
1230 if not self._options.inlineEventContent:
1231 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1233 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1235 output.outputCommands = stream.content
1238 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1240 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1241 filterName = cms.untracked.string(stream.name))
1243 if self._options.filtername:
1244 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1247 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1249 if workflow
in (
"producers,full"):
1250 if isinstance(stream.paths,tuple):
1251 for path
in stream.paths:
1252 self.schedule.
append(path)
1254 self.schedule.
append(stream.paths)
1258 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1259 self.additionalOutputs[name] = output
1260 setattr(self.process,name,output)
1262 if workflow ==
'output':
1264 filterList = output.SelectEvents.SelectEvents
1265 for i, filter
in enumerate(filterList):
1266 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1276 _dotsplit = stepSpec.split(
'.')
1277 if ( len(_dotsplit)==1 ):
1278 if '/' in _dotsplit[0]:
1279 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1282 _sequence = stepSpec
1284 elif ( len(_dotsplit)==2 ):
1285 _cff,_sequence = _dotsplit
1287 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1288 print(stepSpec,
"not recognized")
1291 return l,_sequence,_cff
1296 for i,s
in enumerate(seq.split(
'*')):
1298 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1300 p=getattr(self.
process,prefix)
1301 tmp = getattr(self.
process, s)
1302 if isinstance(tmp, cms.Task):
1313 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1316 for i,s
in enumerate(seq.split(
'+')):
1318 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1332 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1333 """ Enrich the process with alca streams """ 1338 alcaList = sequence.split(
"+")
1339 for alca
in alcaList:
1340 if (len(alca)>MAXLEN):
1341 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1344 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1348 for name
in alcaConfig.__dict__:
1349 alcastream = getattr(alcaConfig,name)
1350 shortName = name.replace(
'ALCARECOStream',
'')
1351 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1352 if shortName
in AlCaNoConcurrentLumis:
1353 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1354 self.
_options.nConcurrentLumis =
"1" 1355 self.
_options.nConcurrentIOVs =
"1" 1356 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1357 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1359 if 'DQM' in alcaList:
1360 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1361 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1363 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1367 if isinstance(alcastream.paths,tuple):
1368 for path
in alcastream.paths:
1373 for i
in range(alcaList.count(shortName)):
1374 alcaList.remove(shortName)
1377 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1378 path = getattr(alcaConfig,name)
1380 alcaList.remove(
'DQM')
1382 if isinstance(alcastream,cms.Path):
1387 if len(alcaList) != 0:
1389 for name
in alcaConfig.__dict__:
1390 alcastream = getattr(alcaConfig,name)
1391 if isinstance(alcastream,cms.FilteredStream):
1392 available.append(name.replace(
'ALCARECOStream',
''))
1393 print(
"The following alcas could not be found "+
str(alcaList))
1394 print(
"available ",available)
1396 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1401 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1402 print(
"Loading lhe fragment from",loadFragment)
1403 __import__(loadFragment)
1404 self.process.
load(loadFragment)
1406 self._options.inlineObjets+=
','+stepSpec
1408 getattr(self.process,stepSpec).nEvents =
int(self._options.number)
1411 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1412 self.excludedPaths.
append(
"lhe_step")
1413 self.schedule.
append( self.process.lhe_step )
1416 """ load the fragment of generator configuration """ 1423 if not '/' in loadFragment:
1424 loadFragment=
'Configuration.Generator.'+loadFragment
1426 loadFragment=loadFragment.replace(
'/',
'.')
1428 print(
"Loading generator fragment from",loadFragment)
1429 __import__(loadFragment)
1434 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1437 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1439 generatorModule=sys.modules[loadFragment]
1440 genModules=generatorModule.__dict__
1451 import FWCore.ParameterSet.Modules
as cmstypes
1452 for name
in genModules:
1453 theObject = getattr(generatorModule,name)
1454 if isinstance(theObject, cmstypes._Module):
1456 if theObject.type_()
in noConcurrentLumiGenerators:
1457 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1458 self.
_options.nConcurrentLumis =
"1" 1459 self.
_options.nConcurrentIOVs =
"1" 1460 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1461 self.
_options.inlineObjets+=
','+name
1463 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly' or stepSpec ==
'pgen_smear':
1464 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1466 elif 'generator' in genModules:
1469 """ Enrich the schedule with the rest of the generation step """ 1474 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1475 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1478 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1480 if self.
_options.scenario ==
'HeavyIons':
1481 if self.
_options.pileup==
'HiMixGEN':
1482 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1483 elif self.
_options.pileup==
'HiMixEmbGEN':
1484 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1486 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1488 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1492 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1498 """ Enrich the schedule with the summary of the filter step """ 1505 """ Enrich the schedule with the simulation step""" 1515 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1521 """ Enrich the schedule with the digitisation step""" 1525 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1527 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1528 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1530 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1531 if self.
_options.inputEventContent==
'':
1532 self.
_options.inputEventContent=
'REGEN' 1541 """ Enrich the schedule with the crossing frame writer step""" 1547 """ Enrich the schedule with the digitisation step""" 1553 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1555 elif self.
_options.pileup_input.startswith(
"filelist:"):
1558 theFiles=self.
_options.pileup_input.split(
',')
1560 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1575 """ Enrich the schedule with the L1 simulation step""" 1582 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1583 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1584 if stepSpec
in supported:
1585 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1586 if self.
_options.scenario ==
'HeavyIons':
1590 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1594 """ Enrich the schedule with the HLT simulation step""" 1596 print(
"no specification of the hlt menu has been given, should never happen")
1597 raise Exception(
'no HLT specifications provided')
1601 from Configuration.HLT.autoHLT
import autoHLT
1604 stepSpec = autoHLT[key]
1606 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1612 if self.
_options.scenario ==
'HeavyIons':
1613 optionsForHLT[
'type'] =
'HIon' 1615 optionsForHLT[
'type'] =
'GRun' 1616 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1617 if stepSpec ==
'run,fromSource':
1618 if hasattr(self.
process.source,
'firstRun'):
1619 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1620 elif hasattr(self.
process.source,
'setRunNumber'):
1621 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1623 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1625 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1630 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1636 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1639 if self.
process.schedule ==
None:
1640 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1647 if not hasattr(self.
process,
'HLTEndSequence'):
1648 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1653 seqReco,seqDigi=stepSpec.spli(
',')
1655 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1669 for filt
in allMetFilterPaths:
1673 ''' Enrich the schedule with L1 HW validation ''' 1675 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1679 ''' Enrich the schedule with L1 reconstruction ''' 1685 ''' Enrich the schedule with L1 reconstruction ''' 1691 ''' Enrich the schedule with a user defined filter sequence ''' 1693 filterConfig,filterSeq = stepSpec.split(
'.')
1694 filterConfig=self.
load(filterConfig)
1696 class PrintAllModules(
object):
1700 def enter(self,visitee):
1702 label=visitee.label()
1707 def leave(self,v):
pass 1709 expander=PrintAllModules()
1710 getattr(self.
process,filterSeq).visit( expander )
1711 self.
_options.inlineObjets+=
','+expander.inliner
1712 self.
_options.inlineObjets+=
','+filterSeq
1723 ''' Enrich the schedule with reconstruction ''' 1729 ''' Enrich the schedule with reconstruction ''' 1735 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1737 print(
"ERROR: this step is only implemented for FastSim")
1744 ''' Enrich the schedule with PAT ''' 1749 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1752 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1754 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1757 if len(self.
_options.customise_commands) > 1:
1758 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1759 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1760 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1761 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1768 ''' Enrich the schedule with PATGEN ''' 1772 raise Exception(
"PATGEN step can only run on MC")
1777 ''' Enrich the schedule with NANO ''' 1780 custom =
"nanoAOD_customizeCommon" 1781 self.
_options.customisation_file.insert(0,
'.'.
join([_nanoCff,custom]))
1783 if len(self.
_options.customise_commands) > 1:
1784 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1785 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1788 ''' Enrich the schedule with NANOGEN ''' 1790 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1793 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1795 self.
_options.customisation_file_unsch.insert(0,
'.'.
join([_nanogenCff, custom]))
1797 self.
_options.customisation_file.insert(0,
'.'.
join([_nanogenCff, custom]))
1800 ''' Enrich the schedule with skimming fragments''' 1803 stdHLTProcName =
'HLT' 1804 newHLTProcName = self.
_options.hltProcess
1805 customiseForReHLT = (newHLTProcName
or (stdHLTProcName
in self.
stepMap))
and (newHLTProcName != stdHLTProcName)
1806 if customiseForReHLT:
1807 print(
"replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1810 from Configuration.Skimming.autoSkim
import autoSkim
1811 skimlist = sequence.split(
'+')
1815 for skim
in skimConfig.__dict__:
1816 skimstream = getattr(skimConfig, skim)
1819 if isinstance(skimstream, cms.Path):
1822 elif isinstance(skimstream, cms.Sequence):
1823 if customiseForReHLT:
1826 if not isinstance(skimstream, cms.FilteredStream):
1829 shortname = skim.replace(
'SKIMStream',
'')
1830 if (sequence==
"all"):
1832 elif (shortname
in skimlist):
1837 skimstreamDQM = cms.FilteredStream(
1838 responsible = skimstream.responsible,
1839 name = skimstream.name+
'DQM',
1840 paths = skimstream.paths,
1841 selectEvents = skimstream.selectEvents,
1842 content = self.
_options.datatier+
'EventContent',
1843 dataTier = cms.untracked.string(self.
_options.datatier)
1846 for i
in range(skimlist.count(shortname)):
1847 skimlist.remove(shortname)
1849 if (skimlist.__len__()!=0
and sequence!=
"all"):
1850 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1851 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1855 ''' Enrich the schedule with a user defined sequence ''' 1861 """ Enrich the schedule with the postreco step """ 1868 print(f
"{stepSpec} in preparing validation")
1870 from Validation.Configuration.autoValidation
import autoValidation
1872 if sequence.find(
',')!=-1:
1873 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1874 valSeqName=sequence.split(
',')[1].
split(
'+')
1879 prevalSeqName=sequence.split(
'+')
1880 valSeqName=sequence.split(
'+')
1886 postfix=
'_'+sequence
1887 prevalSeqName=[
'prevalidation'+postfix]
1888 valSeqName=[
'validation'+postfix]
1889 if not hasattr(self.
process,valSeqName[0]):
1891 valSeqName=[sequence]
1903 for s
in valSeqName+prevalSeqName:
1906 for (i,s)
in enumerate(prevalSeqName):
1908 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1911 for (i,s)
in enumerate(valSeqName):
1912 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1919 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1920 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1927 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1929 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1931 for (i,s)
in enumerate(valSeqName):
1938 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1939 It will climb down within PSets, VPSets and VInputTags to find its target""" 1940 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1946 def doIt(self, pset, base):
1947 if isinstance(pset, cms._Parameterizable):
1948 for name
in pset.parameters_().
keys():
1954 value = getattr(pset, name)
1955 valueType = type(value)
1956 if valueType
in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
1957 self.
doIt(value,base+
"."+name)
1958 elif valueType
in [cms.VPSet, cms.untracked.VPSet]:
1959 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1960 elif valueType
in [cms.string, cms.untracked.string]:
1964 elif valueType
in [cms.VInputTag, cms.untracked.VInputTag]:
1965 for (i,n)
in enumerate(value):
1966 if not isinstance(n, cms.InputTag):
1973 elif valueType
in [cms.vstring, cms.untracked.vstring]:
1974 for (i,n)
in enumerate(value):
1977 elif valueType
in [cms.InputTag, cms.untracked.InputTag]:
1980 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1982 def enter(self,visitee):
1985 label = visitee.label()
1986 except AttributeError:
1987 label =
'<Module not in a Process>' 1989 label =
'other execption' 1990 self.
doIt(visitee, label)
1992 def leave(self,visitee):
1997 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
2000 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 2003 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2009 if proc == HLTprocess:
2012 if verbosityLevel > 0:
2013 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2014 verboseVisit = (verbosityLevel > 1)
2015 getattr(self.
process,sequence).visit(
2017 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
2020 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))' 2021 % (sequence, HLTprocess, proc, verboseVisit))
2026 while '@' in repr(seqList)
and level<maxLevel:
2028 for specifiedCommand
in seqList:
2029 if specifiedCommand.startswith(
'@'):
2030 location=specifiedCommand[1:]
2031 if not location
in mapping:
2032 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
2033 mappedTo=mapping[location]
2035 mappedTo=mappedTo[index]
2036 seqList.remove(specifiedCommand)
2037 seqList.extend(mappedTo.split(
'+'))
2040 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
2048 sequenceList=_dqmSeq.split(
'+')
2049 postSequenceList=_dqmSeq.split(
'+')
2050 from DQMOffline.Configuration.autoDQM
import autoDQM
2054 if len(set(sequenceList))!=len(sequenceList):
2056 print(
"Duplicate entries for DQM:, using",sequenceList)
2058 pathName=
'dqmoffline_step' 2059 for (i,_sequence)
in enumerate(sequenceList):
2061 pathName=
'dqmoffline_%d_step'%(i)
2066 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
2069 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2074 pathName=
'dqmofflineOnPAT_step' 2075 for (i,_sequence)
in enumerate(postSequenceList):
2077 if (sequenceList[i]==postSequenceList[i]):
2080 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2082 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2086 """ Enrich the process with harvesting step """ 2087 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2093 harvestingList = sequence.split(
"+")
2094 from DQMOffline.Configuration.autoDQM
import autoDQM
2095 from Validation.Configuration.autoValidation
import autoValidation
2097 combined_mapping = copy.deepcopy( autoDQM )
2098 combined_mapping.update( autoValidation )
2099 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2101 if len(set(harvestingList))!=len(harvestingList):
2102 harvestingList=list(
OrderedSet(harvestingList))
2103 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2105 for name
in harvestingList:
2106 if not name
in harvestingConfig.__dict__:
2107 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2111 harvestingstream = getattr(harvestingConfig,name)
2112 if isinstance(harvestingstream,cms.Path):
2115 if isinstance(harvestingstream,cms.Sequence):
2116 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2127 """ Enrich the process with AlCaHarvesting step """ 2129 sequence=stepSpec.split(
".")[-1]
2132 harvestingList = sequence.split(
"+")
2136 from Configuration.AlCa.autoPCL
import autoPCL
2139 for name
in harvestingConfig.__dict__:
2140 harvestingstream = getattr(harvestingConfig,name)
2141 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2143 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2144 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2145 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2146 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2148 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2149 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2150 harvestingList.remove(name)
2152 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2155 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2156 print(
"The following harvesting could not be found : ", harvestingList)
2157 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2172 """ Add useful info for the production. """ 2173 self.
process.configurationMetadata=cms.untracked.PSet\
2174 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2175 name=cms.untracked.string(
"Applications"),
2176 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2184 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2186 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2195 from Configuration.StandardSequences.Eras
import eras
2196 for requestedEra
in self.
_options.era.split(
",") :
2197 modifierStrings.append(requestedEra)
2198 modifierImports.append(eras.pythonCfgLines[requestedEra])
2199 modifiers.append(getattr(eras,requestedEra))
2205 for c
in self.
_options.procModifiers:
2206 thingsImported.extend(c.split(
","))
2207 for pm
in thingsImported:
2208 modifierStrings.append(pm)
2209 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2210 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2216 if len(modifierStrings)>0:
2223 if len(modifiers)>0:
2231 def prepare(self, doChecking = False):
2232 """ Prepare the configuration string and add missing pieces.""" 2244 outputModuleCfgCode=
"" 2250 self.
pythonCfgCode +=
"# import of standard configurations\n" 2255 if not hasattr(self.
process,
"configurationMetadata"):
2277 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2278 tmpOut = cms.EndPath(output)
2279 setattr(self.
process,name+
'OutPath',tmpOut)
2288 for object
in self.
_options.inlineObjets.split(
','):
2291 if not hasattr(self.
process,object):
2292 print(
'cannot inline -'+object+
'- : not known')
2297 if self.
_options.pileup==
'HiMixEmbGEN':
2298 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2302 for path
in self.
process.paths:
2306 for endpath
in self.
process.endpaths:
2314 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2315 if self.
process.schedule ==
None:
2316 self.
process.schedule = cms.Schedule()
2318 self.
process.schedule.append(item)
2319 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2322 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2324 for index, item
in enumerate(self.
schedule):
2326 self.
process.schedule.insert(index, item)
2328 self.
process.schedule.append(item)
2330 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2332 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2339 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2340 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2344 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2347 overrideThreads = (self.
_options.nThreads !=
"1")
2348 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2349 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2351 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2359 if overrideConcurrentLumis:
2360 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks = "+self.
_options.nConcurrentLumis+
"\n" 2361 self.
process.options.numberOfConcurrentLuminosityBlocks =
int(self.
_options.nConcurrentLumis)
2362 if overrideConcurrentIOVs:
2363 self.
pythonCfgCode +=
"process.options.eventSetup.numberOfConcurrentIOVs = "+self.
_options.nConcurrentIOVs+
"\n" 2364 self.
process.options.eventSetup.numberOfConcurrentIOVs =
int(self.
_options.nConcurrentIOVs)
2366 if self.
_options.accelerators
is not None:
2367 accelerators = self.
_options.accelerators.split(
',')
2369 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2370 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2371 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2372 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2373 self.
process.options.accelerators = accelerators
2378 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2379 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2380 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2384 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2392 for path
in self.
process.paths:
2402 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2411 if hasattr(self.
process,
"logErrorHarvester"):
2413 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2414 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2415 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2416 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2423 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2424 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2425 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2427 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2430 imports = cms.specialImportRegistry.getSpecialImports()
2431 if len(imports) > 0:
2443 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2446 if hasattr(self.
process.source,
"fileNames"):
2447 if len(self.
process.source.fileNames.value()):
2448 ioJson[
'primary']=self.
process.source.fileNames.value()
2449 if hasattr(self.
process.source,
"secondaryFileNames"):
2450 if len(self.
process.source.secondaryFileNames.value()):
2451 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2452 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2453 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2455 ioJson[o]=om.fileName.value()
2456 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2460 io.write(json.dumps(ioJson))
2463 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def heapProfileOptions(self)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule