3 from __future__
import print_function
4 __version__ =
"$Revision: 1.19 $" 5 __source__ =
"$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $" 7 import FWCore.ParameterSet.Config
as cms
8 from FWCore.ParameterSet.Modules
import _Module
11 from FWCore.ParameterSet.MassReplace
import massReplaceInputTag
as MassReplaceInputTag
17 from subprocess
import Popen,PIPE
18 import FWCore.ParameterSet.DictTypes
as DictTypes
# Default values for every cmsDriver / ConfigBuilder command-line option.
# These are attributes set on the module-level `defaultOptions` object,
# which is created earlier in the file (outside this excerpt — TODO confirm
# it is an Options() instance).  They act as the fallback configuration used
# when the caller does not override an option explicitly.

# --- sample type / mixing ------------------------------------------------
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''

# --- geometry / field / conditions --------------------------------------
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False

# --- event counts and naming --------------------------------------------
defaultOptions.number = -1          # -1: process all events
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""

# --- input sources -------------------------------------------------------
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""

# --- customisation hooks -------------------------------------------------
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False

# --- particle tables -----------------------------------------------------
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']

# --- input/output files --------------------------------------------------
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''

# --- event content / data tier ------------------------------------------
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False

# Vertex-smearing keys are imported here (mid-file import kept in place to
# avoid any behavior change) and used as the beamspot default elsewhere.
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None

# --- output definition and event selection -------------------------------
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None

# --- profiling / misc flags ---------------------------------------------
defaultOptions.profile = None
defaultOptions.heap_profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False

# --- MC run/weight assignment -------------------------------------------
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None

# --- scheduling / threading ---------------------------------------------
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
# NOTE: thread/stream counts are kept as strings here (converted later —
# presumably when building the process; not visible in this excerpt).
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '0'
defaultOptions.accelerators = None

# NOTE(review): the statement below is a fragment of a function defined
# outside this excerpt (its enclosing `def` line is not visible here);
# it is left byte-identical and undocumented beyond this note.
theObject = getattr(process,name)
98 if isinstance(theObject,cms.Path)
or isinstance(theObject,cms.EndPath)
or isinstance(theObject,cms.Sequence):
99 return "process."+name+
" = " + theObject.dumpPython()
100 elif isinstance(theObject,_Module)
or isinstance(theObject,cms.ESProducer):
101 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 103 return "process."+name+
" = " + theObject.dumpPython()+
"\n" 106 import FWCore.ParameterSet.Config
as cms
109 for line
in open(fileName,
'r'): 110 if line.count(
".root")>=2:
112 entries=line.replace(
"\n",
"").
split()
113 prim.append(entries[0])
114 sec.append(entries[1])
115 elif (line.find(
".root")!=-1):
116 entry=line.replace(
"\n",
"")
120 prim = [f
for f
in prim
if not (f
in file_seen
or file_seen.add(f))]
122 sec = [f
for f
in sec
if not (f
in file_seen
or file_seen.add(f))]
124 if not hasattr(s,
"fileNames"):
125 s.fileNames=cms.untracked.vstring(prim)
127 s.fileNames.extend(prim)
129 if not hasattr(s,
"secondaryFileNames"):
130 s.secondaryFileNames=cms.untracked.vstring(sec)
132 s.secondaryFileNames.extend(sec)
133 print(
"found files: ",prim)
135 raise Exception(
"There are not files in input from the file list")
137 print(
"found parent files:",sec)
142 import FWCore.ParameterSet.Config
as cms
145 print(
"the query is",query)
148 while eC!=0
and count<3:
150 print(
'Sleeping, then retrying DAS')
152 p = Popen(
'dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=
True, universal_newlines=
True)
154 tupleP = os.waitpid(p.pid, 0)
158 print(
"DAS succeeded after",count,
"attempts",eC)
160 print(
"DAS failed 3 times- I give up")
161 for line
in pipe.split(
'\n'):
162 if line.count(
".root")>=2:
164 entries=line.replace(
"\n",
"").
split()
165 prim.append(entries[0])
166 sec.append(entries[1])
167 elif (line.find(
".root")!=-1):
168 entry=line.replace(
"\n",
"")
171 prim = sorted(list(set(prim)))
172 sec = sorted(list(set(sec)))
174 if not hasattr(s,
"fileNames"):
175 s.fileNames=cms.untracked.vstring(prim)
177 s.fileNames.extend(prim)
179 if not hasattr(s,
"secondaryFileNames"):
180 s.secondaryFileNames=cms.untracked.vstring(sec)
182 s.secondaryFileNames.extend(sec)
183 print(
"found files: ",prim)
185 print(
"found parent files:",sec)
188 def anyOf(listOfKeys,dict,opt=None):
197 raise Exception(
"any of "+
','.
join(listOfKeys)+
" are mandatory entries of --output options")
200 """The main building routines """ 202 def __init__(self, options, process = None, with_output = False, with_input = False ):
203 """options taken from old cmsDriver and optparse """ 205 options.outfile_name = options.dirout+options.fileout
209 if self.
_options.isData
and options.isMC:
210 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
216 if (hasattr(self.
_options,
"outputDefinition")
and \
217 self.
_options.outputDefinition !=
'' and \
218 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self.
_options.outputDefinition)))
or \
219 (hasattr(self.
_options,
"datatier")
and \
222 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
228 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
231 for step
in self.
_options.step.split(
","):
232 if step==
'':
continue 233 stepParts = step.split(
":")
234 stepName = stepParts[0]
235 if stepName
not in stepList
and not stepName.startswith(
're'):
236 raise ValueError(
"Step {} unknown. Available are {}".
format( stepName , sorted(stepList)))
237 if len(stepParts)==1:
239 elif len(stepParts)==2:
241 elif len(stepParts)==3:
242 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
244 raise ValueError(f
"Step definition {step} invalid")
252 if hasattr(self.
_options,
"no_output_flag")
and self.
_options.no_output_flag:
280 Function to add the igprof profile service so that you can dump in the middle 283 profileOpts = self.
_options.profile.split(
':')
285 profilerInterval = 100
286 profilerFormat =
None 287 profilerJobFormat =
None 293 startEvent = profileOpts.pop(0)
294 if not startEvent.isdigit():
295 raise Exception(
"%s is not a number" % startEvent)
296 profilerStart =
int(startEvent)
298 eventInterval = profileOpts.pop(0)
299 if not eventInterval.isdigit():
300 raise Exception(
"%s is not a number" % eventInterval)
301 profilerInterval =
int(eventInterval)
303 profilerFormat = profileOpts.pop(0)
306 if not profilerFormat:
307 profilerFormat =
"%s___%s___%%I.gz" % (
308 self.
_options.evt_type.replace(
"_cfi",
""),
314 if not profilerJobFormat
and profilerFormat.endswith(
".gz"):
315 profilerJobFormat = profilerFormat.replace(
".gz",
"_EndOfJob.gz")
316 elif not profilerJobFormat:
317 profilerJobFormat = profilerFormat +
"_EndOfJob.gz" 319 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
324 Function to add the jemalloc heap profile service so that you can dump in the middle 327 profileOpts = self.
_options.profile.split(
':')
329 profilerInterval = 100
330 profilerFormat =
None 331 profilerJobFormat =
None 337 startEvent = profileOpts.pop(0)
338 if not startEvent.isdigit():
339 raise Exception(
"%s is not a number" % startEvent)
340 profilerStart =
int(startEvent)
342 eventInterval = profileOpts.pop(0)
343 if not eventInterval.isdigit():
344 raise Exception(
"%s is not a number" % eventInterval)
345 profilerInterval =
int(eventInterval)
347 profilerFormat = profileOpts.pop(0)
350 if not profilerFormat:
351 profilerFormat =
"%s___%s___%%I.heap" % (
352 self.
_options.evt_type.replace(
"_cfi",
""),
358 if not profilerJobFormat
and profilerFormat.endswith(
".heap"):
359 profilerJobFormat = profilerFormat.replace(
".heap",
"_EndOfJob.heap")
360 elif not profilerJobFormat:
361 profilerJobFormat = profilerFormat +
"_EndOfJob.heap" 363 return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
366 includeFile = includeFile.replace(
'/',
'.')
368 return sys.modules[includeFile]
371 """helper routine to load am memorize imports""" 374 includeFile = includeFile.replace(
'/',
'.')
377 return sys.modules[includeFile]
380 """helper routine to remember replace statements""" 382 if not command.strip().startswith(
"#"):
385 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
390 self.
process.options.Rethrow = [
'ProductNotFound']
391 self.
process.options.fileMode =
'FULLMERGE' 396 self.
process.AdaptorConfig = cms.Service(
"AdaptorConfig",
397 stats = cms.untracked.bool(
True),
398 enable = cms.untracked.bool(
True),
399 cacheHint = cms.untracked.string(
"lazy-download"),
400 readHint = cms.untracked.string(
"read-ahead-buffered")
409 self.
process.IgProfService = cms.Service(
"IgProfService",
410 reportFirstEvent = cms.untracked.int32(start),
411 reportEventInterval = cms.untracked.int32(interval),
412 reportToFileAtPostEvent = cms.untracked.string(
"| gzip -c > %s"%(eventFormat)),
413 reportToFileAtPostEndJob = cms.untracked.string(
"| gzip -c > %s"%(jobFormat)))
418 self.
process.JeProfService = cms.Service(
"JeProfService",
419 reportFirstEvent = cms.untracked.int32(start),
420 reportEventInterval = cms.untracked.int32(interval),
421 reportToFileAtPostEvent = cms.untracked.string(
"%s"%(eventFormat)),
422 reportToFileAtPostEndJob = cms.untracked.string(
"%s"%(jobFormat)))
426 """Here we decide how many evts will be processed""" 433 """Here the source is built. Priority: file, generator""" 436 def filesFromOption(self):
437 for entry
in self.
_options.filein.split(
','):
439 if entry.startswith(
"filelist:"):
441 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
446 if not hasattr(self.
process.source,
"secondaryFileNames"):
447 raise Exception(
"--secondfilein not compatible with "+self.
_options.filetype+
"input type")
448 for entry
in self.
_options.secondfilein.split(
','):
450 if entry.startswith(
"filelist:"):
452 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
455 self.
process.source.secondaryFileNames.append(self.
_options.dirin+entry)
459 self.
process.source=cms.Source(
"PoolSource",
460 fileNames = cms.untracked.vstring(),
461 secondaryFileNames= cms.untracked.vstring())
462 filesFromOption(self)
463 elif self.
_options.filetype ==
"DAT":
464 self.
process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
465 filesFromOption(self)
466 elif self.
_options.filetype ==
"LHE":
467 self.
process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
468 if self.
_options.filein.startswith(
"lhe:"):
470 args=self.
_options.filein.split(
':')
472 print(
'LHE input from article ',article)
473 location=
'/store/lhe/' 475 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
476 for line
in textOfFiles:
477 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
478 self.
process.source.fileNames.append(location+article+
'/'+fileName)
481 print(
'Issue to load LHE files, please check and try again.')
484 if len(self.
process.source.fileNames)==0:
485 print(
'Issue with empty filename, but can pass line check')
488 self.
process.source.skipEvents = cms.untracked.uint32(
int(args[2]))
490 filesFromOption(self)
492 elif self.
_options.filetype ==
"DQM":
493 self.
process.source=cms.Source(
"DQMRootSource",
494 fileNames = cms.untracked.vstring())
495 filesFromOption(self)
497 elif self.
_options.filetype ==
"DQMDAQ":
499 self.
process.source=cms.Source(
"DQMStreamerReader")
503 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
506 self.
process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
510 self.
process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
515 self.
_options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,' 517 self.
_options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,' 520 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
521 for command
in self.
_options.inputCommands.split(
','):
523 command = command.strip()
524 if command==
'':
continue 525 self.
process.source.inputCommands.append(command)
526 if not self.
_options.dropDescendant:
527 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
530 import FWCore.PythonUtilities.LumiList
as LumiList
534 if self.
process.source
is None:
535 self.
process.source=cms.Source(
"EmptySource")
541 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
542 if self.
_options.runsAndWeightsForMC:
545 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
546 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMC], str):
547 __import__(RunsAndWeights[self.
_options.runsScenarioForMC])
548 self.
runsAndWeights = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMC]].runProbabilityDistribution
553 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
560 if self.
_options.runsAndWeightsForMCIntegerWeights
or self.
_options.runsScenarioForMCIntegerWeights:
562 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
563 if self.
_options.runsAndWeightsForMCIntegerWeights:
566 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
567 if isinstance(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights], str):
568 __import__(RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights])
569 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self.
_options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
575 raise Exception(
"--relval option required when using --runsAndWeightsInt")
577 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
578 total_events, events_per_job = self.
_options.relval.split(
',')
580 self.
additionalCommands.
append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
585 """ Add output module to the process """ 589 print(
"--datatier & --eventcontent options ignored")
592 outList = eval(self.
_options.outputDefinition)
593 for (id,outDefDict)
in enumerate(outList):
594 outDefDictStr=outDefDict.__str__()
595 if not isinstance(outDefDict,dict):
596 raise Exception(
"--output needs to be passed a list of dict"+self.
_options.outputDefinition+
" is invalid")
598 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
601 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
602 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
603 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
604 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
605 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
607 if not theModuleLabel:
608 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
609 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
610 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output' 612 for name
in tryNames:
613 if not hasattr(self.
process,name):
616 if not theModuleLabel:
617 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
619 defaultFileName=self.
_options.outfile_name
621 defaultFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
623 theFileName=self.
_options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
624 if not theFileName.endswith(
'.root'):
628 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
629 if theStreamType==
'DQMIO': theStreamType=
'DQM' 630 if theStreamType==
'ALL':
631 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
633 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
637 if theStreamType==
'ALCARECO' and not theFilterName:
638 theFilterName=
'StreamALCACombined' 641 CppType=
'PoolOutputModule' 643 CppType=
'TimeoutPoolOutputModule' 644 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule' 645 output = cms.OutputModule(CppType,
646 theEventContent.clone(),
647 fileName = cms.untracked.string(theFileName),
648 dataset = cms.untracked.PSet(
649 dataTier = cms.untracked.string(theTier),
650 filterName = cms.untracked.string(theFilterName))
652 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
653 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
654 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
655 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
657 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
660 if not hasattr(output,
'SelectEvents'):
661 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
663 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
666 if hasattr(self.
process,theModuleLabel):
667 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
669 setattr(self.
process,theModuleLabel,output)
670 outputModule=getattr(self.
process,theModuleLabel)
671 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
672 path=getattr(self.
process,theModuleLabel+
'_step')
675 if not self.
_options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
676 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"): return label
677 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
678 if theExtraOutputCommands:
679 if not isinstance(theExtraOutputCommands,list):
680 raise Exception(
"extra ouput command in --option must be a list of strings")
681 if hasattr(self.
process,theStreamType+
"EventContent"):
682 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
684 outputModule.outputCommands.extend(theExtraOutputCommands)
686 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
691 streamTypes=self.
_options.eventcontent.split(
',')
692 tiers=self.
_options.datatier.split(
',')
693 if not self.
_options.outputDefinition
and len(streamTypes)!=len(tiers):
694 raise Exception(
"number of event content arguments does not match number of datatier arguments")
700 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
701 if streamType==
'':
continue 702 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self.
_options.step:
continue 703 if streamType==
'DQMIO': streamType=
'DQM' 704 eventContent=streamType
706 if streamType ==
"NANOEDMAOD" :
707 eventContent =
"NANOAOD" 708 elif streamType ==
"NANOEDMAODSIM" :
709 eventContent =
"NANOAODSIM" 710 theEventContent = getattr(self.
process, eventContent+
"EventContent")
712 theFileName=self.
_options.outfile_name
713 theFilterName=self.
_options.filtername
715 theFileName=self.
_options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
716 theFilterName=self.
_options.filtername
717 CppType=
'PoolOutputModule' 719 CppType=
'TimeoutPoolOutputModule' 720 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule' 721 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule' 722 output = cms.OutputModule(CppType,
724 fileName = cms.untracked.string(theFileName),
725 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
726 filterName = cms.untracked.string(theFilterName)
729 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
730 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
731 if hasattr(self.
process,
"filtering_step"):
732 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
734 if streamType==
'ALCARECO':
735 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
737 if "MINIAOD" in streamType:
738 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
741 outputModuleName=streamType+
'output' 742 setattr(self.
process,outputModuleName,output)
743 outputModule=getattr(self.
process,outputModuleName)
744 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
745 path=getattr(self.
process,outputModuleName+
'_step')
748 if self.
_options.outputCommands
and streamType!=
'DQM':
749 for evct
in self.
_options.outputCommands.split(
','):
750 if not evct:
continue 751 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
753 if not self.
_options.inlineEventContent:
754 tmpstreamType=streamType
755 if "NANOEDM" in tmpstreamType :
756 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
757 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
759 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
761 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
767 Add selected standard sequences to the process 771 pileupSpec=self.
_options.pileup.split(
',')[0]
774 from Configuration.StandardSequences.Mixing
import Mixing,defineMixing
775 if not pileupSpec
in Mixing
and '.' not in pileupSpec
and 'file:' not in pileupSpec:
776 message = pileupSpec+
' is not a know mixing scenario:\n available are: '+
'\n'.
join(Mixing.keys())
780 if '.' in pileupSpec:
781 mixingDict={
'file':pileupSpec}
782 elif pileupSpec.startswith(
'file:'):
783 mixingDict={
'file':pileupSpec[5:]}
786 mixingDict=copy.copy(Mixing[pileupSpec])
787 if len(self.
_options.pileup.split(
','))>1:
788 mixingDict.update(eval(self.
_options.pileup[self.
_options.pileup.find(
',')+1:]))
791 if 'file:' in pileupSpec:
794 print(
"inlining mixing module configuration")
799 mixingDict.pop(
'file')
802 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
804 elif self.
_options.pileup_input.startswith(
"filelist:"):
807 mixingDict[
'F']=self.
_options.pileup_input.split(
',')
809 for command
in specialization:
811 if len(mixingDict)!=0:
812 raise Exception(
'unused mixing specification: '+mixingDict.keys().
__str__())
833 stepSpec = self.
stepMap[stepName]
834 print(
"Step:", stepName,
"Spec:",stepSpec)
835 if stepName.startswith(
're'):
837 if stepName[2:]
not in self.
_options.donotDropOnInput:
838 self.
_options.inputEventContent=
'%s,%s'%(stepName.upper(),self.
_options.inputEventContent)
839 stepName=stepName[2:]
841 getattr(self,
"prepare_"+stepName)(stepSpec = getattr(self,stepName+
"DefaultSeq"))
842 elif isinstance(stepSpec, list):
843 getattr(self,
"prepare_"+stepName)(stepSpec =
'+'.
join(stepSpec))
844 elif isinstance(stepSpec, tuple):
845 getattr(self,
"prepare_"+stepName)(stepSpec =
','.
join([stepSpec[1],
'+'.
join(stepSpec[0])]))
847 raise ValueError(
"Invalid step definition")
849 if self.
_options.restoreRNDSeeds!=
False:
851 if self.
_options.restoreRNDSeeds==
True:
852 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
854 self.
executeAndRemember(
'process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self.
_options.restoreRNDSeeds))
857 self.
_options.inputCommands+=
'keep *_randomEngineStateProducer_*_*,' 859 self.
_options.inputCommands=
'keep *_randomEngineStateProducer_*_*,' 865 def dropSecondDropStar(iec):
875 if not hasattr(self.
process.source,
'inputCommands'): self.
process.source.inputCommands=cms.untracked.vstring()
876 for evct
in self.
_options.inputEventContent.split(
','):
877 if evct==
'':
continue 878 theEventContent = getattr(self.
process, evct+
"EventContent")
879 if hasattr(theEventContent,
'outputCommands'):
880 self.
process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
881 if hasattr(theEventContent,
'inputCommands'):
882 self.
process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))
884 dropSecondDropStar(self.
process.source.inputCommands)
886 if not self.
_options.dropDescendant:
887 self.
process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
893 """Add conditions to the process""" 894 if not self.
_options.conditions:
return 896 if 'FrontierConditions_GlobalTag' in self.
_options.conditions:
897 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
898 self.
_options.conditions = self.
_options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
901 from Configuration.AlCa.GlobalTag
import GlobalTag
908 """Include the customise code """ 912 for c
in self.
_options.customisation_file:
913 custOpt.extend(c.split(
","))
915 for c
in self.
_options.customisation_file_unsch:
916 custOpt.extend(c.split(
","))
922 raise Exception(
"more than . in the specification:"+opt)
923 fileName=opt.split(
'.')[0]
924 if opt.count(
'.')==0: rest=
'customise' 926 rest=opt.split(
'.')[1]
927 if rest==
'py': rest=
'customise' 929 if fileName
in custMap:
930 custMap[fileName].extend(rest.split(
'+'))
932 custMap[fileName]=rest.split(
'+')
937 final_snippet=
'\n# customisation of the process.\n' 941 allFcn.extend(custMap[opt])
943 if allFcn.count(fcn)!=1:
944 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
948 packageName = f.replace(
".py",
"").
replace(
"/",
".")
949 __import__(packageName)
950 package = sys.modules[packageName]
953 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
955 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n' 957 for line
in file(customiseFile,
'r'): 958 if "import FWCore.ParameterSet.Config" in line:
960 final_snippet += line
962 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
963 for fcn
in custMap[f]:
964 print(
"customising the process with",fcn,
"from",f)
965 if not hasattr(package,fcn):
967 raise Exception(
"config "+f+
" has no function "+fcn)
971 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
972 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
975 final_snippet +=
'\n# End of customisation functions\n' 981 final_snippet=
'\n# Customisation from command line\n' 982 if self.
_options.customise_commands:
984 for com
in self.
_options.customise_commands.split(
'\\n'):
987 final_snippet +=
'\n'+com
998 if self.
_options.particleTable
not in defaultOptions.particleTableList:
999 print(
'Invalid particle table provided. Options are:')
1000 print(defaultOptions.particleTable)
1008 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreams_cff" 1041 self.
L1EMDefaultCFF=
'Configuration/StandardSequences/SimL1EmulatorDM_cff' 1079 self.
_options.beamspot=VtxSmearedDefaultKey
1084 self.
RECODefaultCFF=
"Configuration/StandardSequences/Reconstruction_cff" 1085 self.
PATDefaultCFF=
"Configuration/StandardSequences/PATMC_cff" 1088 self.
ALCADefaultCFF=
"Configuration/StandardSequences/AlCaRecoStreamsMC_cff" 1097 if self.
_options.scenario==
'cosmics':
1099 self.
DIGIDefaultCFF=
"Configuration/StandardSequences/DigiCosmics_cff" 1100 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionCosmics_cff" 1101 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsCosmics_cff" 1111 if self.
_options.scenario==
'HeavyIons':
1113 self.
_options.beamspot=VtxSmearedHIDefaultKey
1118 self.
RECODefaultCFF=
"Configuration/StandardSequences/ReconstructionHeavyIons_cff" 1120 self.
ALCADefaultCFF =
"Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff" 1123 self.
SKIMDefaultCFF=
"Configuration/StandardSequences/SkimsHeavyIons_cff" 1135 self.
magFieldCFF =
'Configuration/StandardSequences/MagneticField_'+self.
_options.magField.replace(
'.',
'')+
'_cff' 1139 self.
GeometryCFF=
'Configuration/StandardSequences/GeometryRecoDB_cff' 1143 if 'start' in self.
_options.conditions.lower():
1144 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_START_cff' 1146 self.
GeometryCFF=
'FastSimulation/Configuration/Geometries_MC_cff' 1148 def inGeometryKeys(opt):
1149 from Configuration.StandardSequences.GeometryConf
import GeometryConf
1150 if opt
in GeometryConf:
1151 return GeometryConf[opt]
1155 geoms=self.
_options.geometry.split(
',')
1156 if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).
split(
',')
1159 if '/' in geoms[1]
or '_cff' in geoms[1]:
1162 self.
GeometryCFF=
'Configuration/Geometry/Geometry'+geoms[1]+
'_cff' 1164 if (geoms[0].startswith(
'DB:')):
1165 self.
SimGeometryCFF=
'Configuration/StandardSequences/GeometrySimDB_cff' 1169 if '/' in geoms[0]
or '_cff' in geoms[0]:
1172 simGeometry=geoms[0]
1174 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'GFlash_cff' 1176 self.
SimGeometryCFF=
'Configuration/Geometry/Geometry'+geoms[0]+
'_cff' 1179 if simGeometry
not in defaultOptions.geometryExtendedOptions:
1180 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimIdeal_cff" 1183 self.
SIMDefaultCFF=
"Configuration/StandardSequences/SimNOBEAM_cff" 1188 self.
SIMDefaultCFF =
'FastSimulation.Configuration.SimIdeal_cff' 1189 self.
RECODefaultCFF=
'FastSimulation.Configuration.Reconstruction_AftMix_cff' 1196 if self.
_options.pileup==
'default':
1197 from Configuration.StandardSequences.Mixing
import MixingDefaultKey
1198 self.
_options.pileup=MixingDefaultKey
1211 output = cms.OutputModule(
"PoolOutputModule")
1212 if stream.selectEvents.parameters_().__len__()!=0:
1213 output.SelectEvents = stream.selectEvents
1215 output.SelectEvents = cms.untracked.PSet()
1216 output.SelectEvents.SelectEvents=cms.vstring()
1217 if isinstance(stream.paths,tuple):
1218 for path
in stream.paths:
1219 output.SelectEvents.SelectEvents.append(path.label())
1221 output.SelectEvents.SelectEvents.append(stream.paths.label())
1225 if isinstance(stream.content,str):
1226 evtPset=getattr(self.process,stream.content)
1227 for p
in evtPset.parameters_():
1228 setattr(output,p,getattr(evtPset,p))
1229 if not self._options.inlineEventContent:
1230 def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
1232 output.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
1234 output.outputCommands = stream.content
1237 output.fileName = cms.untracked.string(self._options.dirout+stream.name+
'.root')
1239 output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
1240 filterName = cms.untracked.string(stream.name))
1242 if self._options.filtername:
1243 output.dataset.filterName= cms.untracked.string(self._options.filtername+
"_"+stream.name)
1246 output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)
1248 if workflow
in (
"producers,full"):
1249 if isinstance(stream.paths,tuple):
1250 for path
in stream.paths:
1251 self.schedule.
append(path)
1253 self.schedule.
append(stream.paths)
1257 if (
not self._options.relval)
and workflow
in (
"full",
"output"):
1258 self.additionalOutputs[name] = output
1259 setattr(self.process,name,output)
1261 if workflow ==
'output':
1263 filterList = output.SelectEvents.SelectEvents
1264 for i, filter
in enumerate(filterList):
1265 filterList[i] = filter+
":"+self._options.triggerResultsProcess
1275 _dotsplit = stepSpec.split(
'.')
1276 if ( len(_dotsplit)==1 ):
1277 if '/' in _dotsplit[0]:
1278 _sequence = defaultSEQ
if defaultSEQ
else stepSpec
1281 _sequence = stepSpec
1283 elif ( len(_dotsplit)==2 ):
1284 _cff,_sequence = _dotsplit
1286 print(
"sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
1287 print(stepSpec,
"not recognized")
1290 return l,_sequence,_cff
1295 for i,s
in enumerate(seq.split(
'*')):
1297 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, s) ))
1299 p=getattr(self.
process,prefix)
1300 tmp = getattr(self.
process, s)
1301 if isinstance(tmp, cms.Task):
1312 setattr(self.
process,prefix,getattr(cms,what)( getattr(self.
process, seq) ))
1315 for i,s
in enumerate(seq.split(
'+')):
1317 setattr(self.
process,sn,getattr(cms,what)( getattr(self.
process, s) ))
1331 def prepare_ALCA(self, stepSpec = None, workflow = 'full'):
1332 """ Enrich the process with alca streams """ 1337 alcaList = sequence.split(
"+")
1338 for alca
in alcaList:
1339 if (len(alca)>MAXLEN):
1340 raise Exception(
"The following alca "+
str(alca)+
" name (with length "+
str(len(alca))+
" chars) cannot be accepted because it exceeds the DBS constraints on the length of the name of the ALCARECOs producers ("+
str(MAXLEN)+
")!")
1343 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1347 for name
in alcaConfig.__dict__:
1348 alcastream = getattr(alcaConfig,name)
1349 shortName = name.replace(
'ALCARECOStream',
'')
1350 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1351 if shortName
in AlCaNoConcurrentLumis:
1352 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1353 self.
_options.nConcurrentLumis =
"1" 1354 self.
_options.nConcurrentIOVs =
"1" 1355 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1356 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1358 if 'DQM' in alcaList:
1359 if not self.
_options.inlineEventContent
and hasattr(self.
process,name):
1360 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1362 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1366 if isinstance(alcastream.paths,tuple):
1367 for path
in alcastream.paths:
1372 for i
in range(alcaList.count(shortName)):
1373 alcaList.remove(shortName)
1376 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1377 path = getattr(alcaConfig,name)
1379 alcaList.remove(
'DQM')
1381 if isinstance(alcastream,cms.Path):
1386 if len(alcaList) != 0:
1388 for name
in alcaConfig.__dict__:
1389 alcastream = getattr(alcaConfig,name)
1390 if isinstance(alcastream,cms.FilteredStream):
1391 available.append(name.replace(
'ALCARECOStream',
''))
1392 print(
"The following alcas could not be found "+
str(alcaList))
1393 print(
"available ",available)
1395 raise Exception(
"The following alcas could not be found "+
str(alcaList))
1400 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'').
replace(
'/',
'.')
1401 print(
"Loading lhe fragment from",loadFragment)
1402 __import__(loadFragment)
1403 self.process.
load(loadFragment)
1405 self._options.inlineObjets+=
','+stepSpec
1407 getattr(self.process,stepSpec).nEvents =
int(self._options.number)
1410 self.process.lhe_step = cms.Path( getattr( self.process,stepSpec) )
1411 self.excludedPaths.
append(
"lhe_step")
1412 self.schedule.
append( self.process.lhe_step )
1415 """ load the fragment of generator configuration """ 1422 if not '/' in loadFragment:
1423 loadFragment=
'Configuration.Generator.'+loadFragment
1425 loadFragment=loadFragment.replace(
'/',
'.')
1427 print(
"Loading generator fragment from",loadFragment)
1428 __import__(loadFragment)
1433 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1436 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1438 generatorModule=sys.modules[loadFragment]
1439 genModules=generatorModule.__dict__
1450 import FWCore.ParameterSet.Modules
as cmstypes
1451 for name
in genModules:
1452 theObject = getattr(generatorModule,name)
1453 if isinstance(theObject, cmstypes._Module):
1455 if theObject.type_()
in noConcurrentLumiGenerators:
1456 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1457 self.
_options.nConcurrentLumis =
"1" 1458 self.
_options.nConcurrentIOVs =
"1" 1459 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1460 self.
_options.inlineObjets+=
','+name
1462 if stepSpec == self.
GENDefaultSeq or stepSpec ==
'pgen_genonly':
1463 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1465 elif 'generator' in genModules:
1468 """ Enrich the schedule with the rest of the generation step """ 1473 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1474 cffToBeLoaded=VtxSmeared[self.
_options.beamspot]
1477 raise Exception(
"VertexSmearing type or beamspot "+self.
_options.beamspot+
" unknown.")
1479 if self.
_options.scenario ==
'HeavyIons':
1480 if self.
_options.pileup==
'HiMixGEN':
1481 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1482 elif self.
_options.pileup==
'HiMixEmbGEN':
1483 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1485 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1487 self.
process.generation_step = cms.Path( getattr(self.
process,_genSeqName) )
1491 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1493 if 'reGEN' in self.
stepMap or stepSpec ==
'pgen_smear':
1497 """ Enrich the schedule with the summary of the filter step """ 1504 """ Enrich the schedule with the simulation step""" 1514 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1520 """ Enrich the schedule with the digitisation step""" 1524 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1526 if _digiSeq ==
'pdigi_valid' or _digiSeq ==
'pdigi_hi':
1527 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1529 if _digiSeq !=
'pdigi_nogen' and _digiSeq !=
'pdigi_valid_nogen' and _digiSeq !=
'pdigi_hi_nogen' and not self.
process.source.type_()==
'EmptySource' and not self.
_options.filetype ==
"LHE":
1530 if self.
_options.inputEventContent==
'':
1531 self.
_options.inputEventContent=
'REGEN' 1540 """ Enrich the schedule with the crossing frame writer step""" 1546 """ Enrich the schedule with the digitisation step""" 1552 if self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:'):
1554 elif self.
_options.pileup_input.startswith(
"filelist:"):
1557 theFiles=self.
_options.pileup_input.split(
',')
1559 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1574 """ Enrich the schedule with the L1 simulation step""" 1581 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection""" 1582 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1583 if stepSpec
in supported:
1584 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'% stepSpec)
1585 if self.
_options.scenario ==
'HeavyIons':
1589 print(
"L1REPACK with '",stepSpec,
"' is not supported! Supported choices are: ",supported)
1593 """ Enrich the schedule with the HLT simulation step""" 1595 print(
"no specification of the hlt menu has been given, should never happen")
1596 raise Exception(
'no HLT specifications provided')
1600 from Configuration.HLT.autoHLT
import autoHLT
1603 stepSpec = autoHLT[key]
1605 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1611 if self.
_options.scenario ==
'HeavyIons':
1612 optionsForHLT[
'type'] =
'HIon' 1614 optionsForHLT[
'type'] =
'GRun' 1615 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1616 if stepSpec ==
'run,fromSource':
1617 if hasattr(self.
process.source,
'firstRun'):
1618 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1619 elif hasattr(self.
process.source,
'setRunNumber'):
1620 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1622 raise Exception(f
'Cannot replace menu to load {stepSpec}')
1624 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(stepSpec.replace(
',',
':'),optionsForHLTConfig))
1629 self.
_options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1635 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1638 if self.
process.schedule ==
None:
1639 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1646 if not hasattr(self.
process,
'HLTEndSequence'):
1647 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
1652 seqReco,seqDigi=stepSpec.spli(
',')
1654 print(f
"RAW2RECO requires two specifications {stepSpec} insufficient")
1668 for filt
in allMetFilterPaths:
1672 ''' Enrich the schedule with L1 HW validation ''' 1674 print(
'\n\n\n DEPRECATED this has no action \n\n\n')
1678 ''' Enrich the schedule with L1 reconstruction ''' 1684 ''' Enrich the schedule with L1 reconstruction ''' 1690 ''' Enrich the schedule with a user defined filter sequence ''' 1692 filterConfig,filterSeq = stepSpec.split(
'.')
1693 filterConfig=self.
load(filterConfig)
1695 class PrintAllModules(
object):
1699 def enter(self,visitee):
1701 label=visitee.label()
1706 def leave(self,v):
pass 1708 expander=PrintAllModules()
1709 getattr(self.
process,filterSeq).visit( expander )
1710 self.
_options.inlineObjets+=
','+expander.inliner
1711 self.
_options.inlineObjets+=
','+filterSeq
1722 ''' Enrich the schedule with reconstruction ''' 1728 ''' Enrich the schedule with reconstruction ''' 1734 ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim''' 1736 print(
"ERROR: this step is only implemented for FastSim")
1743 ''' Enrich the schedule with PAT ''' 1748 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
1751 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
1753 self.
_options.customisation_file_unsch.insert(0,
"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")
1756 if len(self.
_options.customise_commands) > 1:
1757 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1758 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patTrigger.processName = \""+self.
_options.hltProcess+
"\"\n" 1759 self.
_options.customise_commands = self.
_options.customise_commands +
"process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1760 self.
_options.customise_commands = self.
_options.customise_commands +
"process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1767 ''' Enrich the schedule with PATGEN ''' 1771 raise Exception(
"PATGEN step can only run on MC")
1776 ''' Enrich the schedule with NANO ''' 1779 custom =
"nanoAOD_customizeCommon" 1780 self.
_options.customisation_file.insert(0,
'.'.
join([_nanoCff,custom]))
1782 if len(self.
_options.customise_commands) > 1:
1783 self.
_options.customise_commands = self.
_options.customise_commands +
" \n" 1784 self.
_options.customise_commands = self.
_options.customise_commands +
"process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self.
_options.hltProcess+
"' )\n" 1787 ''' Enrich the schedule with NANOGEN ''' 1789 fromGen =
any([x
in self.
stepMap for x
in [
'LHE',
'GEN',
'AOD']])
1792 custom =
"customizeNanoGEN" if fromGen
else "customizeNanoGENFromMini" 1794 self.
_options.customisation_file_unsch.insert(0,
'.'.
join([_nanogenCff, custom]))
1796 self.
_options.customisation_file.insert(0,
'.'.
join([_nanogenCff, custom]))
1799 ''' Enrich the schedule with skimming fragments''' 1802 stdHLTProcName =
'HLT' 1803 newHLTProcName = self.
_options.hltProcess
1804 customiseForReHLT = (newHLTProcName
or (stdHLTProcName
in self.
stepMap))
and (newHLTProcName != stdHLTProcName)
1805 if customiseForReHLT:
1806 print(
"replacing %s process name - step SKIM:%s will use '%s'" % (stdHLTProcName, sequence, newHLTProcName))
1809 from Configuration.Skimming.autoSkim
import autoSkim
1810 skimlist = sequence.split(
'+')
1814 for skim
in skimConfig.__dict__:
1815 skimstream = getattr(skimConfig, skim)
1818 if isinstance(skimstream, cms.Path):
1821 elif isinstance(skimstream, cms.Sequence):
1822 if customiseForReHLT:
1825 if not isinstance(skimstream, cms.FilteredStream):
1828 shortname = skim.replace(
'SKIMStream',
'')
1829 if (sequence==
"all"):
1831 elif (shortname
in skimlist):
1836 skimstreamDQM = cms.FilteredStream(
1837 responsible = skimstream.responsible,
1838 name = skimstream.name+
'DQM',
1839 paths = skimstream.paths,
1840 selectEvents = skimstream.selectEvents,
1841 content = self.
_options.datatier+
'EventContent',
1842 dataTier = cms.untracked.string(self.
_options.datatier)
1845 for i
in range(skimlist.count(shortname)):
1846 skimlist.remove(shortname)
1848 if (skimlist.__len__()!=0
and sequence!=
"all"):
1849 print(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1850 raise Exception(
'WARNING, possible typo with SKIM:'+
'+'.
join(skimlist))
1854 ''' Enrich the schedule with a user defined sequence ''' 1860 """ Enrich the schedule with the postreco step """ 1867 print(f
"{stepSpec} in preparing validation")
1869 from Validation.Configuration.autoValidation
import autoValidation
1871 if sequence.find(
',')!=-1:
1872 prevalSeqName=sequence.split(
',')[0].
split(
'+')
1873 valSeqName=sequence.split(
',')[1].
split(
'+')
1878 prevalSeqName=sequence.split(
'+')
1879 valSeqName=sequence.split(
'+')
1885 postfix=
'_'+sequence
1886 prevalSeqName=[
'prevalidation'+postfix]
1887 valSeqName=[
'validation'+postfix]
1888 if not hasattr(self.
process,valSeqName[0]):
1890 valSeqName=[sequence]
1902 for s
in valSeqName+prevalSeqName:
1905 for (i,s)
in enumerate(prevalSeqName):
1907 setattr(self.
process,
'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.
process, s)) )
1910 for (i,s)
in enumerate(valSeqName):
1911 setattr(self.
process,
'validation_step%s'%NFI(i), cms.EndPath( getattr(self.
process, s)))
1918 if not 'DIGI' in self.
stepMap and not self.
_options.fast
and not any(
map(
lambda s : s.startswith(
'genvalid'), valSeqName)):
1919 if self.
_options.restoreRNDSeeds==
False and not self.
_options.restoreRNDSeeds==
True:
1926 self.
_options.customisation_file.append(
"SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")
1928 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
1930 for (i,s)
in enumerate(valSeqName):
1937 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value 1938 It will climb down within PSets, VPSets and VInputTags to find its target""" 1939 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1945 def doIt(self, pset, base):
1946 if isinstance(pset, cms._Parameterizable):
1947 for name
in pset.parameters_().
keys():
1953 value = getattr(pset, name)
1954 valueType = type(value)
1955 if valueType
in [cms.PSet, cms.untracked.PSet, cms.EDProducer]:
1956 self.
doIt(value,base+
"."+name)
1957 elif valueType
in [cms.VPSet, cms.untracked.VPSet]:
1958 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1959 elif valueType
in [cms.string, cms.untracked.string]:
1963 elif valueType
in [cms.VInputTag, cms.untracked.VInputTag]:
1964 for (i,n)
in enumerate(value):
1965 if not isinstance(n, cms.InputTag):
1972 elif valueType
in [cms.vstring, cms.untracked.vstring]:
1973 for (i,n)
in enumerate(value):
1976 elif valueType
in [cms.InputTag, cms.untracked.InputTag]:
1979 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1981 def enter(self,visitee):
1984 label = visitee.label()
1985 except AttributeError:
1986 label =
'<Module not in a Process>' 1988 label =
'other execption' 1989 self.
doIt(visitee, label)
1991 def leave(self,visitee):
1996 print(
"Replacing all InputTag %s => %s"%(oldT,newT))
1999 loadMe=
'from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag' 2002 self.
additionalCommands.
append(
'massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))
2008 if proc == HLTprocess:
2011 if verbosityLevel > 0:
2012 print(
"replacing %s process name - sequence %s will use '%s'" % (HLTprocess, sequence, proc))
2013 verboseVisit = (verbosityLevel > 1)
2014 getattr(self.
process,sequence).visit(
2016 if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.
additionalCommands:
2019 'process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",), verbose = %s))' 2020 % (sequence, HLTprocess, proc, verboseVisit))
2025 while '@' in repr(seqList)
and level<maxLevel:
2027 for specifiedCommand
in seqList:
2028 if specifiedCommand.startswith(
'@'):
2029 location=specifiedCommand[1:]
2030 if not location
in mapping:
2031 raise Exception(
"Impossible to map "+location+
" from "+repr(mapping))
2032 mappedTo=mapping[location]
2034 mappedTo=mappedTo[index]
2035 seqList.remove(specifiedCommand)
2036 seqList.extend(mappedTo.split(
'+'))
2039 raise Exception(
"Could not fully expand "+repr(seqList)+
" from "+repr(mapping))
2047 sequenceList=_dqmSeq.split(
'+')
2048 postSequenceList=_dqmSeq.split(
'+')
2049 from DQMOffline.Configuration.autoDQM
import autoDQM
2053 if len(set(sequenceList))!=len(sequenceList):
2054 sequenceList=list(set(sequenceList))
2055 print(
"Duplicate entries for DQM:, using",sequenceList)
2057 pathName=
'dqmoffline_step' 2058 for (i,_sequence)
in enumerate(sequenceList):
2060 pathName=
'dqmoffline_%d_step'%(i)
2065 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process,_sequence ) ) )
2068 if hasattr(self.
process,
"genstepfilter")
and len(self.
process.genstepfilter.triggerConditions):
2073 pathName=
'dqmofflineOnPAT_step' 2074 for (i,_sequence)
in enumerate(postSequenceList):
2076 if (sequenceList[i]==postSequenceList[i]):
2079 pathName=
'dqmofflineOnPAT_%d_step'%(i)
2081 setattr(self.
process,pathName, cms.EndPath( getattr(self.
process, _sequence ) ) )
2085 """ Enrich the process with harvesting step """ 2086 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self.
_options.harvesting+
'_cff' 2092 harvestingList = sequence.split(
"+")
2093 from DQMOffline.Configuration.autoDQM
import autoDQM
2094 from Validation.Configuration.autoValidation
import autoValidation
2096 combined_mapping = copy.deepcopy( autoDQM )
2097 combined_mapping.update( autoValidation )
2098 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2100 if len(set(harvestingList))!=len(harvestingList):
2101 harvestingList=list(set(harvestingList))
2102 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2104 for name
in harvestingList:
2105 if not name
in harvestingConfig.__dict__:
2106 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2110 harvestingstream = getattr(harvestingConfig,name)
2111 if isinstance(harvestingstream,cms.Path):
2114 if isinstance(harvestingstream,cms.Sequence):
2115 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2126 """ Enrich the process with AlCaHarvesting step """ 2128 sequence=stepSpec.split(
".")[-1]
2131 harvestingList = sequence.split(
"+")
2135 from Configuration.AlCa.autoPCL
import autoPCL
2138 for name
in harvestingConfig.__dict__:
2139 harvestingstream = getattr(harvestingConfig,name)
2140 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2142 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2143 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2144 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2145 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2147 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2148 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2149 harvestingList.remove(name)
2151 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2154 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2155 print(
"The following harvesting could not be found : ", harvestingList)
2156 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
2171 """ Add useful info for the production. """ 2172 self.
process.configurationMetadata=cms.untracked.PSet\
2173 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2174 name=cms.untracked.string(
"Applications"),
2175 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2183 self.
pythonCfgCode +=
"# using: \n# "+__version__[1:-1]+
"\n# "+__source__[1:-1]+
'\n' 2185 self.
pythonCfgCode +=
"import FWCore.ParameterSet.Config as cms\n\n" 2194 from Configuration.StandardSequences.Eras
import eras
2195 for requestedEra
in self.
_options.era.split(
",") :
2196 modifierStrings.append(requestedEra)
2197 modifierImports.append(eras.pythonCfgLines[requestedEra])
2198 modifiers.append(getattr(eras,requestedEra))
2204 for c
in self.
_options.procModifiers:
2205 thingsImported.extend(c.split(
","))
2206 for pm
in thingsImported:
2207 modifierStrings.append(pm)
2208 modifierImports.append(
'from Configuration.ProcessModifiers.'+pm+
'_cff import '+pm)
2209 modifiers.append(getattr(importlib.import_module(
'Configuration.ProcessModifiers.'+pm+
'_cff'),pm))
2215 if len(modifierStrings)>0:
2222 if len(modifiers)>0:
2230 def prepare(self, doChecking = False):
2231 """ Prepare the configuration string and add missing pieces.""" 2243 outputModuleCfgCode=
"" 2249 self.
pythonCfgCode +=
"# import of standard configurations\n" 2254 if not hasattr(self.
process,
"configurationMetadata"):
2276 self.
pythonCfgCode +=
"process.%s = %s" %(name, output.dumpPython())
2277 tmpOut = cms.EndPath(output)
2278 setattr(self.
process,name+
'OutPath',tmpOut)
2287 for object
in self.
_options.inlineObjets.split(
','):
2290 if not hasattr(self.
process,object):
2291 print(
'cannot inline -'+object+
'- : not known')
2296 if self.
_options.pileup==
'HiMixEmbGEN':
2297 self.
pythonCfgCode +=
"\nprocess.generator.embeddingMode=cms.int32(1)\n" 2301 for path
in self.
process.paths:
2305 for endpath
in self.
process.endpaths:
2313 pathNames = [
'process.'+p.label_()
for p
in self.
schedule]
2314 if self.
process.schedule ==
None:
2315 self.
process.schedule = cms.Schedule()
2317 self.
process.schedule.append(item)
2318 result =
'process.schedule = cms.Schedule('+
','.
join(pathNames)+
')\n' 2321 raise Exception(
'the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')
2323 for index, item
in enumerate(self.
schedule):
2325 self.
process.schedule.insert(index, item)
2327 self.
process.schedule.append(item)
2329 result =
"# process.schedule imported from cff in HLTrigger.Configuration\n" 2331 result +=
'process.schedule.insert('+
str(index)+
', '+item+
')\n' 2338 self.
process.schedule.associate(getattr(self.
process, labelToAssociate))
2339 self.
pythonCfgCode +=
'process.schedule.associate(process.' + labelToAssociate +
')\n' 2343 self.
pythonCfgCode+=
"from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n" 2346 overrideThreads = (self.
_options.nThreads !=
"1")
2347 overrideConcurrentLumis = (self.
_options.nConcurrentLumis != defaultOptions.nConcurrentLumis)
2348 overrideConcurrentIOVs = (self.
_options.nConcurrentIOVs != defaultOptions.nConcurrentIOVs)
2350 if overrideThreads
or overrideConcurrentLumis
or overrideConcurrentIOVs:
2358 if overrideConcurrentLumis:
2359 self.
pythonCfgCode +=
"process.options.numberOfConcurrentLuminosityBlocks = "+self.
_options.nConcurrentLumis+
"\n" 2360 self.
process.options.numberOfConcurrentLuminosityBlocks =
int(self.
_options.nConcurrentLumis)
2361 if overrideConcurrentIOVs:
2362 self.
pythonCfgCode +=
"process.options.eventSetup.numberOfConcurrentIOVs = "+self.
_options.nConcurrentIOVs+
"\n" 2363 self.
process.options.eventSetup.numberOfConcurrentIOVs =
int(self.
_options.nConcurrentIOVs)
2365 if self.
_options.accelerators
is not None:
2366 accelerators = self.
_options.accelerators.split(
',')
2368 self.
pythonCfgCode +=
"# Enable only these accelerator backends\n" 2369 self.
pythonCfgCode +=
"process.load('Configuration.StandardSequences.Accelerators_cff')\n" 2370 self.
pythonCfgCode +=
"process.options.accelerators = ['" +
"', '".
join(accelerators) +
"']\n" 2371 self.
process.
load(
'Configuration.StandardSequences.Accelerators_cff')
2372 self.
process.options.accelerators = accelerators
2377 self.
pythonCfgCode +=
"from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n" 2378 self.
pythonCfgCode +=
"MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n" 2379 MassReplaceInputTag(self.
process, new=
"rawDataMapperByLabel", old=
"rawDataCollector")
2383 self.
pythonCfgCode +=
'# filter all path with the production filter sequence\n' 2391 for path
in self.
process.paths:
2401 print(
"--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
2410 if hasattr(self.
process,
"logErrorHarvester"):
2412 self.
pythonCfgCode +=
"\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n" 2413 self.
pythonCfgCode +=
"from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n" 2414 self.
pythonCfgCode +=
"process = customiseLogErrorHarvesterUsingOutputCommands(process)\n" 2415 from FWCore.Modules.logErrorHarvester_cff
import customiseLogErrorHarvesterUsingOutputCommands
2422 self.
pythonCfgCode +=
"\n# Add early deletion of temporary data products to reduce peak memory need\n" 2423 self.
pythonCfgCode +=
"from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n" 2424 self.
pythonCfgCode +=
"process = customiseEarlyDelete(process)\n" 2426 from Configuration.StandardSequences.earlyDeleteSettings_cff
import customiseEarlyDelete
2429 imports = cms.specialImportRegistry.getSpecialImports()
2430 if len(imports) > 0:
2442 if not self.
_options.io.endswith(
'.io'): self._option.io+=
'.io' 2445 if hasattr(self.
process.source,
"fileNames"):
2446 if len(self.
process.source.fileNames.value()):
2447 ioJson[
'primary']=self.
process.source.fileNames.value()
2448 if hasattr(self.
process.source,
"secondaryFileNames"):
2449 if len(self.
process.source.secondaryFileNames.value()):
2450 ioJson[
'secondary']=self.
process.source.secondaryFileNames.value()
2451 if self.
_options.pileup_input
and (self.
_options.pileup_input.startswith(
'dbs:')
or self.
_options.pileup_input.startswith(
'das:')):
2452 ioJson[
'pileup']=self.
_options.pileup_input[4:]
2454 ioJson[o]=om.fileName.value()
2455 ioJson[
'GT']=self.
process.GlobalTag.globaltag.value()
2459 io.write(json.dumps(ioJson))
2462 def load(self, includeFile)
def prepare_L1(self, stepSpec=None)
def loadDefaultOrSpecifiedCFF(self, stepSpec, defaultCFF, defaultSEQ='')
def expandMapping(self, seqList, mapping, index=None)
def renameHLTprocessInSequence(self, sequence, proc=None, HLTprocess='HLT', verbosityLevel=1)
bool any(const std::vector< T > &v, const T &what)
def massSearchReplaceAnyInputTag(sequence, oldInputTag, newInputTag, verbose=False, moduleLabelOnly=False, skipLabelTest=False)
def finalizeFastSimHLT(self)
inliner
load the relevant part
def renameInputTagsInSequence(self, sequence, oldT="rawDataCollector", newT="rawDataRepacker")
process
adding standard sequences might change the inputEventContent option and therefore needs to be finaliz...
def replace(string, replacements)
def customiseEarlyDelete(process)
def prepare_CFWRITER(self, stepSpec=None)
def prepare_RECOBEFMIX(self, stepSpec="reconstruction")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
def filesFromDASQuery(query, option="", s=None)
def prepare_PATFILTER(self, stepSpec=None)
def build_production_info(self, evt_type, evtnumber)
def prepare_RECOSIM(self, stepSpec="recosim")
def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=())
def prepare_L1HwVal(self, stepSpec='L1HwVal')
def prepare_DIGI2RAW(self, stepSpec=None)
def prepare_POSTRECO(self, stepSpec=None)
def prepare_SKIM(self, stepSpec="all")
def prepare_ALCAPRODUCER(self, stepSpec=None)
def prepare_HARVESTING(self, stepSpec=None)
def prepare_ALCAOUTPUT(self, stepSpec=None)
def prepare_RAW2DIGI(self, stepSpec="RawToDigi")
def prepare_GEN(self, stepSpec=None)
def prepare_FILTER(self, stepSpec=None)
scheduleIndexOfFirstHLTPath
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
def anyOf(listOfKeys, dict, opt=None)
def addExtraStream(self, name, stream, workflow='full')
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
def prepare_PAT(self, stepSpec="miniAOD")
productionFilterSequence
put it before all the other paths
runsAndWeights
drop LHEXMLStringProduct on input to save memory if appropriate
def addCustomise(self, unsch=0)
def prepare_L1Reco(self, stepSpec="L1Reco")
def prepare_HLT(self, stepSpec=None)
def addCustomiseCmdLine(self)
def scheduleSequence(self, seq, prefix, what='Path')
bool insert(Storage &iStorage, ItemType *iItem, const IdTag &iIdTag)
def prepare_DIGI(self, stepSpec=None)
def loadAndRemember(self, includeFile)
def prepare_ENDJOB(self, stepSpec='endOfProcess')
def throwAndSetRandomRun(source, runsAndProbs)
def prepare_DQM(self, stepSpec='DQMOffline')
def prepare_ALCAHARVEST(self, stepSpec=None)
def split(sequence, size)
static std::string join(char **cmd)
def prepare_USER(self, stepSpec=None)
def prepare_ALCA(self, stepSpec=None, workflow='full')
def dumpPython(process, name)
def prepare(self, doChecking=False)
def associatePatAlgosToolsTask(process)
def prepare_REPACK(self, stepSpec=None)
def prepare_NANOGEN(self, stepSpec="nanoAOD")
def __init__(self, options, process=None, with_output=False, with_input=False)
def prepare_L1REPACK(self, stepSpec=None)
def prepare_L1TrackTrigger(self, stepSpec="L1TrackTrigger")
def prepare_RAW2RECO(self, stepSpec=None)
def prepare_NANO(self, stepSpec='')
def prepare_VALIDATION(self, stepSpec='validation')
def lumi_to_run(runs, events_in_sample, events_per_job)
def scheduleSequenceAtEnd(self, seq, prefix)
def prepare_RECO(self, stepSpec="reconstruction")
def prepare_SIM(self, stepSpec=None)
def filesFromList(fileName, s=None)
def customiseLogErrorHarvesterUsingOutputCommands(process)
def doIt(self, pset, base)
def heapProfileOptions(self)
def prepare_PATGEN(self, stepSpec="miniGEN")
def prepare_LHE(self, stepSpec=None)
def completeInputCommand(self)
def prepare_DATAMIX(self, stepSpec=None)
def executeAndRemember(self, command)
def addStandardSequences(self)
nextScheduleIsConditional
put the filtering path in the schedule