from __future__ import print_function
__version__ = "$Revision: 1.19 $"
__source__ = "$Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v $"

import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.Modules import _Module
from FWCore.ParameterSet.MassReplace import massReplaceInputTag as MassReplaceInputTag
from subprocess import Popen,PIPE
import FWCore.ParameterSet.DictTypes as DictTypes
# standard-library modules used throughout this file
import sys
import os
import re
import copy
import hashlib

# simple holder for the cmsDriver command line options
class Options:
    pass
defaultOptions = Options()
defaultOptions.datamix = 'DataOnSim'
defaultOptions.isMC=False
defaultOptions.isData=True
defaultOptions.step=''
defaultOptions.pileup='NoPileUp'
defaultOptions.pileup_input = None
defaultOptions.pileup_dasoption = ''
defaultOptions.geometry = 'SimDB'
defaultOptions.geometryExtendedOptions = ['ExtendedGFlash','Extended','NoCastor']
defaultOptions.magField = ''
defaultOptions.conditions = None
defaultOptions.scenarioOptions=['pp','cosmics','nocoll','HeavyIons']
defaultOptions.harvesting= 'AtRunEnd'
defaultOptions.gflash = False
defaultOptions.number = -1
defaultOptions.number_out = None
defaultOptions.arguments = ""
defaultOptions.name = "NO NAME GIVEN"
defaultOptions.evt_type = ""
defaultOptions.filein = ""
defaultOptions.dasquery=""
defaultOptions.dasoption=""
defaultOptions.secondfilein = ""
defaultOptions.customisation_file = []
defaultOptions.customisation_file_unsch = []
defaultOptions.customise_commands = ""
defaultOptions.inline_custom=False
defaultOptions.particleTable = 'pythiapdt'
defaultOptions.particleTableList = ['pythiapdt','pdt']
defaultOptions.dirin = ''
defaultOptions.dirout = ''
defaultOptions.filetype = 'EDM'
defaultOptions.fileout = 'output.root'
defaultOptions.filtername = ''
defaultOptions.lazy_download = False
defaultOptions.custom_conditions = ''
defaultOptions.hltProcess = ''
defaultOptions.eventcontent = None
defaultOptions.datatier = None
defaultOptions.inlineEventContent = True
defaultOptions.inlineObjets = ''
defaultOptions.hideGen=False
from Configuration.StandardSequences.VtxSmeared import VtxSmearedDefaultKey,VtxSmearedHIDefaultKey
defaultOptions.beamspot=None
defaultOptions.outputDefinition = ''
defaultOptions.inputCommands = None
defaultOptions.outputCommands = None
defaultOptions.inputEventContent = ''
defaultOptions.dropDescendant = False
defaultOptions.relval = None
defaultOptions.profile = None
defaultOptions.isRepacked = False
defaultOptions.restoreRNDSeeds = False
defaultOptions.donotDropOnInput = ''
defaultOptions.python_filename = ''
defaultOptions.io=None
defaultOptions.lumiToProcess=None
defaultOptions.fast=False
defaultOptions.runsAndWeightsForMC = None
defaultOptions.runsScenarioForMC = None
defaultOptions.runsAndWeightsForMCIntegerWeights = None
defaultOptions.runsScenarioForMCIntegerWeights = None
defaultOptions.runUnscheduled = False
defaultOptions.timeoutOutput = False
defaultOptions.nThreads = '1'
defaultOptions.nStreams = '0'
defaultOptions.nConcurrentLumis = '0'
defaultOptions.nConcurrentIOVs = '1'
def dumpPython(process,name):
    theObject = getattr(process,name)
    if isinstance(theObject,cms.Path) or isinstance(theObject,cms.EndPath) or isinstance(theObject,cms.Sequence):
        return "process."+name+" = " + theObject.dumpPython()
    elif isinstance(theObject,_Module) or isinstance(theObject,cms.ESProducer):
        return "process."+name+" = " + theObject.dumpPython()+"\n"
    else:
        return "process."+name+" = " + theObject.dumpPython()+"\n"
def filesFromList(fileName,s=None):
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    for line in open(fileName,'r'):
        if line.count(".root")>=2:
            # two files per line: primary file and its parent
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove duplicates and sort
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(prim)==0:
        raise Exception("There are no files in input from the file list")
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
def filesFromDASQuery(query,option="",s=None):
    import os,time
    import FWCore.ParameterSet.Config as cms
    prim=[]
    sec=[]
    print("the query is",query)
    eC=5
    count=0
    while eC!=0 and count<3:
        if count!=0:
            print('Sleeping, then retrying DAS')
            time.sleep(100)
        p = Popen('dasgoclient %s --query "%s"'%(option,query), stdout=PIPE,shell=True, universal_newlines=True)
        pipe=p.stdout.read()
        tupleP = os.waitpid(p.pid, 0)
        eC=tupleP[1]
        count=count+1
    if eC==0:
        print("DAS succeeded after",count,"attempts",eC)
    else:
        print("DAS failed 3 times- I give up")
    for line in pipe.split('\n'):
        if line.count(".root")>=2:
            # two files per line: primary file and its parent
            entries=line.replace("\n","").split()
            prim.append(entries[0])
            sec.append(entries[1])
        elif (line.find(".root")!=-1):
            entry=line.replace("\n","")
            prim.append(entry)
    # remove duplicates and sort
    prim = sorted(list(set(prim)))
    sec = sorted(list(set(sec)))
    if s:
        if not hasattr(s,"fileNames"):
            s.fileNames=cms.untracked.vstring(prim)
        else:
            s.fileNames.extend(prim)
        if len(sec)!=0:
            if not hasattr(s,"secondaryFileNames"):
                s.secondaryFileNames=cms.untracked.vstring(sec)
            else:
                s.secondaryFileNames.extend(sec)
    print("found files: ",prim)
    if len(sec)!=0:
        print("found parent files:",sec)
    return (prim,sec)
def anyOf(listOfKeys,dict,opt=None):
    # return (and pop) the value of the first listed key present in dict; fall back to opt
    for k in listOfKeys:
        if k in dict:
            toReturn=dict[k]
            dict.pop(k)
            return toReturn
    if opt!=None:
        return opt
    else:
        raise Exception("any of "+','.join(listOfKeys)+" are mandatory entries of --output options")
196 """The main building routines """
198 def __init__(self, options, process = None, with_output = False, with_input = False ):
199 """options taken from old cmsDriver and optparse """
201 options.outfile_name = options.dirout+options.fileout
205 if self._options.isData
and options.isMC:
206 raise Exception(
"ERROR: You may specify only --data or --mc, not both")
211 if 'ENDJOB' in self._options.step:
212 if (hasattr(self.
_options,
"outputDefinition")
and \
213 self._options.outputDefinition !=
'' and \
214 any(
anyOf([
't',
'tier',
'dataTier'],outdic) ==
'DQMIO' for outdic
in eval(self._options.outputDefinition)))
or \
215 (hasattr(self.
_options,
"datatier")
and \
216 self._options.datatier
and \
217 'DQMIO' in self._options.datatier):
218 print(
"removing ENDJOB from steps since not compatible with DQMIO dataTier")
219 self._options.step=self._options.step.replace(
',ENDJOB',
'')
224 stepList = [re.sub(
r'^prepare_',
'', methodName)
for methodName
in ConfigBuilder.__dict__
if methodName.startswith(
'prepare_')]
227 for step
in self._options.step.split(
","):
228 if step==
'':
continue
229 stepParts = step.split(
":")
230 stepName = stepParts[0]
231 if stepName
not in stepList
and not stepName.startswith(
're'):
232 raise ValueError(
"Step "+stepName+
" unknown")
233 if len(stepParts)==1:
235 elif len(stepParts)==2:
237 elif len(stepParts)==3:
238 self.
stepMap[stepName]=(stepParts[2].
split(
'+'),stepParts[1])
240 raise ValueError(
"Step definition "+step+
" invalid")
241 self.stepKeys.append(stepName)
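        # Illustrative examples of accepted step specifications (values are hypothetical):
        #   "RECO"                        -> stepMap['RECO'] = ""
        #   "RECO:reconstruction"         -> stepMap['RECO'] = ['reconstruction']
        #   "DQM:autoDQM:@common+@muon"   -> stepMap['DQM']  = (['@common','@muon'], 'autoDQM')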
        if hasattr(self._options,"no_output_flag") and self._options.no_output_flag:
            self.with_output = False
    def profileOptions(self):
        """
        addIgProfService
        Function to add the igprof profile service so that you can dump in the middle
        of the run.
        """
        profileOpts = self._options.profile.split(':')
        profilerStart = 1
        profilerInterval = 100
        profilerFormat = None
        profilerJobFormat = None

        if profileOpts:
            # the type, given as first argument, is unused here
            profileOpts.pop(0)
        if len(profileOpts):
            startEvent = profileOpts.pop(0)
            if not startEvent.isdigit():
                raise Exception("%s is not a number" % startEvent)
            profilerStart = int(startEvent)
        if len(profileOpts):
            eventInterval = profileOpts.pop(0)
            if not eventInterval.isdigit():
                raise Exception("%s is not a number" % eventInterval)
            profilerInterval = int(eventInterval)
        if len(profileOpts):
            profilerFormat = profileOpts.pop(0)

        if not profilerFormat:
            profilerFormat = "%s___%s___%%I.gz" % (
                self._options.evt_type.replace("_cfi", ""),
                hashlib.md5(
                    (str(self._options.step) + str(self._options.pileup) + str(self._options.conditions) +
                     str(self._options.datatier) + str(self._options.profileTypeLabel)).encode('utf-8')
                ).hexdigest()
            )
        if not profilerJobFormat and profilerFormat.endswith(".gz"):
            profilerJobFormat = profilerFormat.replace(".gz", "_EndOfJob.gz")
        elif not profilerJobFormat:
            profilerJobFormat = profilerFormat + "_EndOfJob.gz"

        return (profilerStart,profilerInterval,profilerFormat,profilerJobFormat)
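    # Illustrative example (values are hypothetical): an option such as
    # --profile pp:2:100:myprof.gz is split on ':' into
    #   type 'pp' (ignored here), first event 2, interval 100, per-event file 'myprof.gz',
    # and the end-of-job file name becomes 'myprof_EndOfJob.gz'.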
    def load(self,includeFile):
        includeFile = includeFile.replace('/','.')
        self.process.load(includeFile)
        return sys.modules[includeFile]

    def loadAndRemember(self, includeFile):
        """helper routine to load and memorize imports"""
        includeFile = includeFile.replace('/','.')
        self.imports.append(includeFile)
        self.process.load(includeFile)
        return sys.modules[includeFile]
332 """helper routine to remember replace statements"""
333 self.additionalCommands.append(command)
334 if not command.strip().startswith(
"#"):
337 exec(re.sub(
r"([^a-zA-Z_0-9]|^)(process)([^a-zA-Z_0-9])",
r"\1self.process\3",command))
    def addCommon(self):
        if 'HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys():
            self.process.options.Rethrow = ['ProductNotFound']
            self.process.options.fileMode = 'FULLMERGE'

        self.addedObjects.append(("","options"))

        if self._options.lazy_download:
            self.process.AdaptorConfig = cms.Service("AdaptorConfig",
                                                     stats = cms.untracked.bool(True),
                                                     enable = cms.untracked.bool(True),
                                                     cacheHint = cms.untracked.string("lazy-download"),
                                                     readHint = cms.untracked.string("read-ahead-buffered"))
            self.addedObjects.append(("Setup lazy download","AdaptorConfig"))

        if self._options.profile:
            (start, interval, eventFormat, jobFormat)=self.profileOptions()
            self.process.IgProfService = cms.Service("IgProfService",
                                                     reportFirstEvent         = cms.untracked.int32(start),
                                                     reportEventInterval      = cms.untracked.int32(interval),
                                                     reportToFileAtPostEvent  = cms.untracked.string("| gzip -c > %s"%(eventFormat)),
                                                     reportToFileAtPostEndJob = cms.untracked.string("| gzip -c > %s"%(jobFormat)))
            self.addedObjects.append(("Setup IGProf Service for profiling","IgProfService"))

    def addMaxEvents(self):
        """Here we decide how many evts will be processed"""
        self.process.maxEvents.input = int(self._options.number)
        if self._options.number_out:
            self.process.maxEvents.output = int(self._options.number_out)
        self.addedObjects.append(("","maxEvents"))
376 """Here the source is built. Priority: file, generator"""
377 self.addedObjects.append((
"Input source",
"source"))
379 def filesFromOption(self):
380 for entry
in self._options.filein.split(
','):
382 if entry.startswith(
"filelist:"):
384 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
385 filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption,self.process.source)
387 self.process.source.fileNames.append(self._options.dirin+entry)
388 if self._options.secondfilein:
389 if not hasattr(self.process.source,
"secondaryFileNames"):
390 raise Exception(
"--secondfilein not compatible with "+self._options.filetype+
"input type")
391 for entry
in self._options.secondfilein.split(
','):
393 if entry.startswith(
"filelist:"):
394 self.process.source.secondaryFileNames.extend((
filesFromList(entry[9:]))[0])
395 elif entry.startswith(
"dbs:")
or entry.startswith(
"das:"):
396 self.process.source.secondaryFileNames.extend((
filesFromDASQuery(
'file dataset = %s'%(entry[4:]),self._options.dasoption))[0])
398 self.process.source.secondaryFileNames.append(self._options.dirin+entry)
400 if self._options.filein
or self._options.dasquery:
401 if self._options.filetype ==
"EDM":
402 self.process.source=cms.Source(
"PoolSource",
403 fileNames = cms.untracked.vstring(),
404 secondaryFileNames= cms.untracked.vstring())
405 filesFromOption(self)
406 elif self._options.filetype ==
"DAT":
407 self.process.source=cms.Source(
"NewEventStreamFileReader",fileNames = cms.untracked.vstring())
408 filesFromOption(self)
409 elif self._options.filetype ==
"LHE":
410 self.process.source=cms.Source(
"LHESource", fileNames = cms.untracked.vstring())
411 if self._options.filein.startswith(
"lhe:"):
413 args=self._options.filein.split(
':')
415 print(
'LHE input from article ',article)
416 location=
'/store/lhe/'
418 textOfFiles=os.popen(
'cmsLHEtoEOSManager.py -l '+article)
419 for line
in textOfFiles:
420 for fileName
in [x
for x
in line.split()
if '.lhe' in x]:
421 self.process.source.fileNames.append(location+article+
'/'+fileName)
424 print(
'Issue to load LHE files, please check and try again.')
427 if len(self.process.source.fileNames)==0:
428 print(
'Issue with empty filename, but can pass line check')
431 self.process.source.skipEvents = cms.untracked.uint32(int(args[2]))
433 filesFromOption(self)
435 elif self._options.filetype ==
"DQM":
436 self.process.source=cms.Source(
"DQMRootSource",
437 fileNames = cms.untracked.vstring())
438 filesFromOption(self)
440 elif self._options.filetype ==
"DQMDAQ":
442 self.process.source=cms.Source(
"DQMStreamerReader")
445 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
446 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
448 if self._options.dasquery!=
'':
449 self.process.source=cms.Source(
"PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
450 filesFromDASQuery(self._options.dasquery,self._options.dasoption,self.process.source)
452 if (
'HARVESTING' in self.stepMap.keys()
or 'ALCAHARVEST' in self.stepMap.keys())
and (
not self._options.filetype ==
"DQM"):
453 self.process.source.processingMode = cms.untracked.string(
"RunsAndLumis")
456 if 'GEN' in self.stepMap.keys()
and not self._options.filetype ==
"LHE":
457 if self._options.inputCommands:
458 self._options.inputCommands+=
',drop LHEXMLStringProduct_*_*_*,'
460 self._options.inputCommands=
'keep *, drop LHEXMLStringProduct_*_*_*,'
462 if self.process.source
and self._options.inputCommands
and not self._options.filetype ==
"LHE":
463 if not hasattr(self.process.source,
'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
464 for command
in self._options.inputCommands.split(
','):
466 command = command.strip()
467 if command==
'':
continue
468 self.process.source.inputCommands.append(command)
469 if not self._options.dropDescendant:
470 self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(
False)
472 if self._options.lumiToProcess:
473 import FWCore.PythonUtilities.LumiList
as LumiList
474 self.process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange(
LumiList.LumiList(self._options.lumiToProcess).getCMSSWString().
split(
',') )
476 if 'GEN' in self.stepMap.keys()
or 'LHE' in self.
stepMap or (
not self._options.filein
and hasattr(self.
_options,
"evt_type")):
477 if self.process.source
is None:
478 self.process.source=cms.Source(
"EmptySource")
482 if self._options.runsAndWeightsForMC
or self._options.runsScenarioForMC :
483 if not self._options.isMC :
484 raise Exception(
"options --runsAndWeightsForMC and --runsScenarioForMC are only valid for MC")
485 if self._options.runsAndWeightsForMC:
488 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
489 if isinstance(RunsAndWeights[self._options.runsScenarioForMC], str):
490 __import__(RunsAndWeights[self._options.runsScenarioForMC])
491 self.
runsAndWeights = sys.modules[RunsAndWeights[self._options.runsScenarioForMC]].runProbabilityDistribution
493 self.
runsAndWeights = RunsAndWeights[self._options.runsScenarioForMC]
496 import SimGeneral.Configuration.ThrowAndSetRandomRun
as ThrowAndSetRandomRun
498 self.additionalCommands.append(
'import SimGeneral.Configuration.ThrowAndSetRandomRun as ThrowAndSetRandomRun')
499 self.additionalCommands.append(
'ThrowAndSetRandomRun.throwAndSetRandomRun(process.source,%s)'%(self.
runsAndWeights))
503 if self._options.runsAndWeightsForMCIntegerWeights
or self._options.runsScenarioForMCIntegerWeights:
504 if not self._options.isMC :
505 raise Exception(
"options --runsAndWeightsForMCIntegerWeights and --runsScenarioForMCIntegerWeights are only valid for MC")
506 if self._options.runsAndWeightsForMCIntegerWeights:
509 from Configuration.StandardSequences.RunsAndWeights
import RunsAndWeights
510 if isinstance(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights], str):
511 __import__(RunsAndWeights[self._options.runsScenarioForMCIntegerWeights])
512 self.
runsAndWeightsInt = sys.modules[RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]].runProbabilityDistribution
514 self.
runsAndWeightsInt = RunsAndWeights[self._options.runsScenarioForMCIntegerWeights]
517 if not self._options.relval:
518 raise Exception(
"--relval option required when using --runsAndWeightsInt")
519 if 'DATAMIX' in self._options.step:
520 from SimGeneral.Configuration.LumiToRun
import lumi_to_run
521 total_events, events_per_job = self._options.relval.split(
',')
523 self.additionalCommands.append(
"process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in " +
str(lumi_to_run_mapping) +
"])")
528 """ Add output module to the process """
530 if self._options.outputDefinition:
531 if self._options.datatier:
532 print(
"--datatier & --eventcontent options ignored")
535 outList = eval(self._options.outputDefinition)
536 for (id,outDefDict)
in enumerate(outList):
537 outDefDictStr=outDefDict.__str__()
538 if not isinstance(outDefDict,dict):
539 raise Exception(
"--output needs to be passed a list of dict"+self._options.outputDefinition+
" is invalid")
541 theTier=
anyOf([
't',
'tier',
'dataTier'],outDefDict)
544 theStreamType=
anyOf([
'e',
'ec',
'eventContent',
'streamType'],outDefDict,theTier)
545 theFilterName=
anyOf([
'f',
'ftN',
'filterName'],outDefDict,
'')
546 theSelectEvent=
anyOf([
's',
'sE',
'selectEvents'],outDefDict,
'')
547 theModuleLabel=
anyOf([
'l',
'mL',
'moduleLabel'],outDefDict,
'')
548 theExtraOutputCommands=
anyOf([
'o',
'oC',
'outputCommands'],outDefDict,
'')
550 if not theModuleLabel:
551 tryNames=[theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+
'output',
552 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+
'output',
553 theStreamType.replace(theTier.replace(
'-',
''),
'')+theTier.replace(
'-',
'')+theFilterName+theSelectEvent.split(
',')[0].
replace(
':',
'for').
replace(
' ',
'')+
'output'
555 for name
in tryNames:
556 if not hasattr(self.
process,name):
559 if not theModuleLabel:
560 raise Exception(
"cannot find a module label for specification: "+outDefDictStr)
562 defaultFileName=self._options.outfile_name
564 defaultFileName=self._options.outfile_name.replace(
'.root',
'_in'+theTier+
'.root')
566 theFileName=self._options.dirout+
anyOf([
'fn',
'fileName'],outDefDict,defaultFileName)
567 if not theFileName.endswith(
'.root'):
571 raise Exception(
"unused keys from --output options: "+
','.
join(outDefDict.keys()))
572 if theStreamType==
'DQMIO': theStreamType=
'DQM'
573 if theStreamType==
'ALL':
574 theEventContent = cms.PSet(outputCommands = cms.untracked.vstring(
'keep *'))
576 theEventContent = getattr(self.
process, theStreamType+
"EventContent")
580 if theStreamType==
'ALCARECO' and not theFilterName:
581 theFilterName=
'StreamALCACombined'
584 CppType=
'PoolOutputModule'
585 if self._options.timeoutOutput:
586 CppType=
'TimeoutPoolOutputModule'
587 if theStreamType==
'DQM' and theTier==
'DQMIO': CppType=
'DQMRootOutputModule'
588 output = cms.OutputModule(CppType,
589 theEventContent.clone(),
590 fileName = cms.untracked.string(theFileName),
591 dataset = cms.untracked.PSet(
592 dataTier = cms.untracked.string(theTier),
593 filterName = cms.untracked.string(theFilterName))
595 if not theSelectEvent
and hasattr(self.
process,
'generation_step')
and theStreamType!=
'LHE':
596 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
597 if not theSelectEvent
and hasattr(self.
process,
'filtering_step'):
598 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
600 output.SelectEvents =cms.untracked.PSet(SelectEvents = cms.vstring(theSelectEvent))
603 if not hasattr(output,
'SelectEvents'):
604 output.SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring())
606 output.SelectEvents.SelectEvents.extend(getattr(self.
process,
'OutALCARECO'+alca).SelectEvents.SelectEvents)
609 if hasattr(self.
process,theModuleLabel):
610 raise Exception(
"the current process already has a module "+theModuleLabel+
" defined")
612 setattr(self.
process,theModuleLabel,output)
613 outputModule=getattr(self.
process,theModuleLabel)
614 setattr(self.
process,theModuleLabel+
'_step',cms.EndPath(outputModule))
615 path=getattr(self.
process,theModuleLabel+
'_step')
616 self.schedule.append(path)
618 if not self._options.inlineEventContent
and hasattr(self.
process,theStreamType+
"EventContent"):
619 def doNotInlineEventContent(instance,label = "cms.untracked.vstring(process.
"+theStreamType+"EventContent.outputCommands)
"):
621 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
622 if theExtraOutputCommands:
623 if not isinstance(theExtraOutputCommands,list):
624 raise Exception(
"extra ouput command in --option must be a list of strings")
625 if hasattr(self.
process,theStreamType+
"EventContent"):
626 self.
executeAndRemember(
'process.%s.outputCommands.extend(%s)'%(theModuleLabel,theExtraOutputCommands))
628 outputModule.outputCommands.extend(theExtraOutputCommands)
630 result+=
"\nprocess."+theModuleLabel+
" = "+outputModule.dumpPython()
635 streamTypes=self._options.eventcontent.split(
',')
636 tiers=self._options.datatier.split(
',')
637 if not self._options.outputDefinition
and len(streamTypes)!=len(tiers):
638 raise Exception(
"number of event content arguments does not match number of datatier arguments")
641 if self._options.step.split(
',')[0].
split(
':')[0] ==
'ALCA':
644 for i,(streamType,tier)
in enumerate(
zip(streamTypes,tiers)):
645 if streamType==
'':
continue
646 if streamType ==
'ALCARECO' and not 'ALCAPRODUCER' in self._options.step:
continue
647 if streamType==
'DQMIO': streamType=
'DQM'
648 eventContent=streamType
650 if streamType ==
"NANOEDMAOD" :
651 eventContent =
"NANOAOD"
652 elif streamType ==
"NANOEDMAODSIM" :
653 eventContent =
"NANOAODSIM"
654 theEventContent = getattr(self.
process, eventContent+
"EventContent")
656 theFileName=self._options.outfile_name
657 theFilterName=self._options.filtername
659 theFileName=self._options.outfile_name.replace(
'.root',
'_in'+streamType+
'.root')
660 theFilterName=self._options.filtername
661 CppType=
'PoolOutputModule'
662 if self._options.timeoutOutput:
663 CppType=
'TimeoutPoolOutputModule'
664 if streamType==
'DQM' and tier==
'DQMIO': CppType=
'DQMRootOutputModule'
665 if "NANOAOD" in streamType : CppType=
'NanoAODOutputModule'
666 output = cms.OutputModule(CppType,
668 fileName = cms.untracked.string(theFileName),
669 dataset = cms.untracked.PSet(dataTier = cms.untracked.string(tier),
670 filterName = cms.untracked.string(theFilterName)
673 if hasattr(self.
process,
"generation_step")
and streamType!=
'LHE':
674 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'generation_step'))
675 if hasattr(self.
process,
"filtering_step"):
676 output.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring(
'filtering_step'))
678 if streamType==
'ALCARECO':
679 output.dataset.filterName = cms.untracked.string(
'StreamALCACombined')
681 if "MINIAOD" in streamType:
682 from PhysicsTools.PatAlgos.slimming.miniAOD_tools
import miniAOD_customizeOutput
685 outputModuleName=streamType+
'output'
686 setattr(self.
process,outputModuleName,output)
687 outputModule=getattr(self.
process,outputModuleName)
688 setattr(self.
process,outputModuleName+
'_step',cms.EndPath(outputModule))
689 path=getattr(self.
process,outputModuleName+
'_step')
690 self.schedule.append(path)
692 if self._options.outputCommands
and streamType!=
'DQM':
693 for evct
in self._options.outputCommands.split(
','):
694 if not evct:
continue
695 self.
executeAndRemember(
"process.%s.outputCommands.append('%s')"%(outputModuleName,evct.strip()))
697 if not self._options.inlineEventContent:
698 tmpstreamType=streamType
699 if "NANOEDM" in tmpstreamType :
700 tmpstreamType=tmpstreamType.replace(
"NANOEDM",
"NANO")
701 def doNotInlineEventContent(instance,label = "process."+tmpstreamType+"EventContent.outputCommands"):
703 outputModule.outputCommands.__dict__[
"dumpPython"] = doNotInlineEventContent
705 result+=
"\nprocess."+outputModuleName+
" = "+outputModule.dumpPython()
    def addStandardSequences(self):
        """
        Add selected standard sequences to the process
        """
        # load the pile up file
        if self._options.pileup:
            pileupSpec=self._options.pileup.split(',')[0]

            # does the requested pile-up scenario exist?
            from Configuration.StandardSequences.Mixing import Mixing,defineMixing
            if not pileupSpec in Mixing and '.' not in pileupSpec and 'file:' not in pileupSpec:
                message = pileupSpec+' is not a know mixing scenario:\n available are: '+'\n'.join(Mixing.keys())
                raise Exception(message)

            # put mixing parameters in a dictionary
            if '.' in pileupSpec:
                mixingDict={'file':pileupSpec}
            elif pileupSpec.startswith('file:'):
                mixingDict={'file':pileupSpec[5:]}
            else:
                mixingDict=copy.copy(Mixing[pileupSpec])
            if len(self._options.pileup.split(','))>1:
                mixingDict.update(eval(self._options.pileup[self._options.pileup.find(',')+1:]))

            # load the pu cfg file corresponding to the requested pu scenario
            if 'file:' in pileupSpec:
                # the file is local
                self.process.load(mixingDict['file'])
                print("inlining mixing module configuration")
                self._options.inlineObjets+=',mix'
            else:
                self.loadAndRemember(mixingDict['file'])

            mixingDict.pop('file')
            if not "DATAMIX" in self.stepMap.keys(): # when DATAMIX is present, pileup_input refers to pre-mixed GEN-SIM-DIGI
                if self._options.pileup_input:
                    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
                        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
                    elif self._options.pileup_input.startswith("filelist:"):
                        mixingDict['F']=(filesFromList(self._options.pileup_input[9:]))[0]
                    else:
                        mixingDict['F']=self._options.pileup_input.split(',')
                specialization=defineMixing(mixingDict)
                for command in specialization:
                    self.executeAndRemember(command)
                if len(mixingDict)!=0:
                    raise Exception('unused mixing specification: '+mixingDict.keys().__str__())

        # load the geometry file
        try:
            if len(self.stepMap):
                self.loadAndRemember(self.GeometryCFF)
                if ('SIM' in self.stepMap or 'reSIM' in self.stepMap) and not self._options.fast:
                    self.loadAndRemember(self.SimGeometryCFF)
        except ImportError:
            print("Geometry option",self._options.geometry,"unknown.")
            raise

        if len(self.stepMap):
            self.loadAndRemember(self.magFieldCFF)

        for stepName in self.stepKeys:
            stepSpec = self.stepMap[stepName]
            print("Step:", stepName,"Spec:",stepSpec)
            if stepName.startswith('re'):
                # add the corresponding input content
                if stepName[2:] not in self._options.donotDropOnInput:
                    self._options.inputEventContent='%s,%s'%(stepName.upper(),self._options.inputEventContent)
                stepName=stepName[2:]
            if stepSpec=="":
                getattr(self,"prepare_"+stepName)(sequence = getattr(self,stepName+"DefaultSeq"))
            elif isinstance(stepSpec, list):
                getattr(self,"prepare_"+stepName)(sequence = '+'.join(stepSpec))
            elif isinstance(stepSpec, tuple):
                getattr(self,"prepare_"+stepName)(sequence = ','.join([stepSpec[1],'+'.join(stepSpec[0])]))
            else:
                raise ValueError("Invalid step definition")

        if self._options.restoreRNDSeeds!=False:
            # it is either True, or a process name
            if self._options.restoreRNDSeeds==True:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateLabel=cms.untracked.string("randomEngineStateProducer")')
            else:
                self.executeAndRemember('process.RandomNumberGeneratorService.restoreStateTag=cms.untracked.InputTag("randomEngineStateProducer","","%s")'%(self._options.restoreRNDSeeds))
            if self._options.inputEventContent or self._options.inputCommands:
                if self._options.inputCommands:
                    self._options.inputCommands+='keep *_randomEngineStateProducer_*_*,'
                else:
                    self._options.inputCommands='keep *_randomEngineStateProducer_*_*,'

        if self._options.inputEventContent:
            def dropSecondDropStar(iec):
                # drop occurrences of 'drop *' after the first one
                count=0
                for item in iec:
                    if item=='drop *':
                        if count!=0:
                            iec.remove(item)
                        count+=1

            # allow comma separated input eventcontent
            if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
            for evct in self._options.inputEventContent.split(','):
                if evct=='': continue
                theEventContent = getattr(self.process, evct+"EventContent")
                if hasattr(theEventContent,'outputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.outputCommands))
                if hasattr(theEventContent,'inputCommands'):
                    self.process.source.inputCommands.extend(copy.copy(theEventContent.inputCommands))

            dropSecondDropStar(self.process.source.inputCommands)

            if not self._options.dropDescendant:
                self.process.source.dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
837 """Add conditions to the process"""
838 if not self._options.conditions:
return
840 if 'FrontierConditions_GlobalTag' in self._options.conditions:
841 print(
'using FrontierConditions_GlobalTag in --conditions is not necessary anymore and will be deprecated soon. please update your command line')
842 self._options.conditions = self._options.conditions.replace(
"FrontierConditions_GlobalTag,",
'')
845 from Configuration.AlCa.GlobalTag
import GlobalTag
846 self.process.GlobalTag =
GlobalTag(self.process.GlobalTag, self._options.conditions, self._options.custom_conditions)
847 self.additionalCommands.append(
'from Configuration.AlCa.GlobalTag import GlobalTag')
848 self.additionalCommands.append(
'process.GlobalTag = GlobalTag(process.GlobalTag, %s, %s)' % (repr(self._options.conditions), repr(self._options.custom_conditions)))
852 """Include the customise code """
856 for c
in self._options.customisation_file:
857 custOpt.extend(c.split(
","))
859 for c
in self._options.customisation_file_unsch:
860 custOpt.extend(c.split(
","))
866 raise Exception(
"more than . in the specification:"+opt)
867 fileName=opt.split(
'.')[0]
868 if opt.count(
'.')==0: rest=
'customise'
870 rest=opt.split(
'.')[1]
871 if rest==
'py': rest=
'customise'
873 if fileName
in custMap:
874 custMap[fileName].extend(rest.split(
'+'))
876 custMap[fileName]=rest.split(
'+')
881 final_snippet=
'\n# customisation of the process.\n'
885 allFcn.extend(custMap[opt])
887 if allFcn.count(fcn)!=1:
888 raise Exception(
"cannot specify twice "+fcn+
" as a customisation method")
892 packageName = f.replace(
".py",
"").
replace(
"/",
".")
893 __import__(packageName)
894 package = sys.modules[packageName]
897 customiseFile = re.sub(
r'\.pyc$',
'.py', package.__file__)
899 final_snippet+=
'\n# Automatic addition of the customisation function from '+packageName+
'\n'
900 if self._options.inline_custom:
901 for line
in file(customiseFile,
'r'):
902 if "import FWCore.ParameterSet.Config" in line:
904 final_snippet += line
906 final_snippet +=
'from %s import %s \n'%(packageName,
','.
join(custMap[f]))
907 for fcn
in custMap[f]:
908 print(
"customising the process with",fcn,
"from",f)
909 if not hasattr(package,fcn):
911 raise Exception(
"config "+f+
" has no function "+fcn)
915 final_snippet +=
"\n#call to customisation function "+fcn+
" imported from "+packageName
916 final_snippet +=
"\nprocess = %s(process)\n"%(fcn,)
919 final_snippet +=
'\n# End of customisation functions\n'
925 final_snippet=
'\n# Customisation from command line\n'
926 if self._options.customise_commands:
928 for com
in self._options.customise_commands.split(
'\\n'):
931 final_snippet +=
'\n'+com
    def define_Configs(self):
        if self._options.particleTable not in defaultOptions.particleTableList:
            print('Invalid particle table provided. Options are:')
            print(defaultOptions.particleTable)
            sys.exit(-1)
        else:
            if len(self.stepMap):
                self.loadAndRemember('SimGeneral.HepPDTESSource.'+self._options.particleTable+'_cfi')

        if self._options.isRepacked: self.RAW2DIGIDefaultCFF="Configuration/StandardSequences/RawToDigi_DataMapper_cff"

        if "DATAMIX" in self.stepMap.keys():
            self.L1EMDefaultCFF='Configuration/StandardSequences/SimL1EmulatorDM_cff'

        if self._options.fast or ('RAW2DIGI' in self.stepMap and 'RECO' in self.stepMap):

        if not self._options.beamspot:
            self._options.beamspot=VtxSmearedDefaultKey

        # MC-specific defaults
        if self._options.isMC==True:
            self.RECODefaultCFF="Configuration/StandardSequences/Reconstruction_cff"
            self.PATDefaultCFF="Configuration/StandardSequences/PATMC_cff"
            self.ALCADefaultCFF="Configuration/StandardSequences/AlCaRecoStreamsMC_cff"
        else:
            self._options.beamspot = None

        # the scenario may override some defaults
        if self._options.scenario=='cosmics':
            self._options.pileup='Cosmics'
            self.DIGIDefaultCFF="Configuration/StandardSequences/DigiCosmics_cff"
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionCosmics_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsCosmics_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineCosmicsMC_cff"

        if self._options.scenario=='HeavyIons':
            if not self._options.beamspot:
                self._options.beamspot=VtxSmearedHIDefaultKey
            self.RECODefaultCFF="Configuration/StandardSequences/ReconstructionHeavyIons_cff"
            self.ALCADefaultCFF = "Configuration/StandardSequences/AlCaRecoStreamsHeavyIons_cff"
            self.SKIMDefaultCFF="Configuration/StandardSequences/SkimsHeavyIons_cff"
            if self._options.isMC==True:
                self.DQMOFFLINEDefaultCFF="DQMOffline/Configuration/DQMOfflineHeavyIonsMC_cff"

        self.magFieldCFF = 'Configuration/StandardSequences/MagneticField_'+self._options.magField.replace('.','')+'_cff'
        self.magFieldCFF = self.magFieldCFF.replace("__",'_')

        # the geometry
        if self._options.fast:
            if 'start' in self._options.conditions.lower():
                self.GeometryCFF='FastSimulation/Configuration/Geometries_START_cff'
            else:
                self.GeometryCFF='FastSimulation/Configuration/Geometries_MC_cff'
        else:
            def inGeometryKeys(opt):
                from Configuration.StandardSequences.GeometryConf import GeometryConf
                if opt in GeometryConf:
                    return GeometryConf[opt]
                else:
                    return opt

            geoms=self._options.geometry.split(',')
            if len(geoms)==1: geoms=inGeometryKeys(geoms[0]).split(',')
            if len(geoms)==2:
                # may specify the reco geometry
                if '/' in geoms[1] or '_cff' in geoms[1]:
                    self.GeometryCFF=geoms[1]
                else:
                    self.GeometryCFF='Configuration/Geometry/Geometry'+geoms[1]+'_cff'

            if (geoms[0].startswith('DB:')):
                self.SimGeometryCFF='Configuration/StandardSequences/GeometrySimDB_cff'
                self.geometryDBLabel=geoms[0][3:]
            else:
                if '/' in geoms[0] or '_cff' in geoms[0]:
                    self.SimGeometryCFF=geoms[0]
                else:
                    simGeometry=geoms[0]
                    if self._options.gflash==True:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'GFlash_cff'
                    else:
                        self.SimGeometryCFF='Configuration/Geometry/Geometry'+geoms[0]+'_cff'

        # the simulation cff depends on the simulation geometry
        if simGeometry not in defaultOptions.geometryExtendedOptions:
            self.SIMDefaultCFF="Configuration/StandardSequences/SimIdeal_cff"

        if self._options.scenario=='nocoll' or self._options.scenario=='cosmics':
            self.SIMDefaultCFF="Configuration/StandardSequences/SimNOBEAM_cff"
            self._options.beamspot='NoSmear'

        # fastsim requires some changes to the default cff's and sequences
        if self._options.fast:
            self.SIMDefaultCFF = 'FastSimulation.Configuration.SimIdeal_cff'
            self.RECODefaultCFF= 'FastSimulation.Configuration.Reconstruction_AftMix_cff'

        # Mixing
        if self._options.pileup=='default':
            from Configuration.StandardSequences.Mixing import MixingDefaultKey
            self._options.pileup=MixingDefaultKey

        # not driven by a default cff anymore
        if self._options.isData:
            self._options.pileup=None
    def addExtraStream(self, name, stream, workflow = 'full'):
        # define output module and go from there
        output = cms.OutputModule("PoolOutputModule")
        if stream.selectEvents.parameters_().__len__()!=0:
            output.SelectEvents = stream.selectEvents
        else:
            output.SelectEvents = cms.untracked.PSet()
            output.SelectEvents.SelectEvents=cms.vstring()
        if isinstance(stream.paths,tuple):
            for path in stream.paths:
                output.SelectEvents.SelectEvents.append(path.label())
        else:
            output.SelectEvents.SelectEvents.append(stream.paths.label())

        if isinstance(stream.content,str):
            evtPset=getattr(self.process,stream.content)
            for p in evtPset.parameters_():
                setattr(output,p,getattr(evtPset,p))
            if not self._options.inlineEventContent:
                def doNotInlineEventContent(instance,label = "process."+stream.content+".outputCommands"):
                    return label
                output.outputCommands.__dict__["dumpPython"] = doNotInlineEventContent
        else:
            output.outputCommands = stream.content

        output.fileName = cms.untracked.string(self._options.dirout+stream.name+'.root')

        output.dataset = cms.untracked.PSet( dataTier = stream.dataTier,
                                             filterName = cms.untracked.string(stream.name))

        if self._options.filtername:
            output.dataset.filterName= cms.untracked.string(self._options.filtername+"_"+stream.name)

        # add an automatic flushing to limit memory consumption
        output.eventAutoFlushCompressedSize=cms.untracked.int32(5*1024*1024)

        if workflow in ("producers,full"):
            if isinstance(stream.paths,tuple):
                for path in stream.paths:
                    self.schedule.append(path)
            else:
                self.schedule.append(stream.paths)

        # in case of relvals we don't want to have additional outputs
        if (not self._options.relval) and workflow in ("full","output"):
            self.additionalOutputs[name] = output
            setattr(self.process,name,output)

        if workflow == 'output':
            # adjust the select events to the proper trigger results from the previous process
            filterList = output.SelectEvents.SelectEvents
            for i, filter in enumerate(filterList):
                filterList[i] = filter+":"+self._options.triggerResultsProcess

        return output
    def loadDefaultOrSpecifiedCFF(self, sequence, defaultCFF):
        if ( len(sequence.split('.'))==1 ):
            l=self.loadAndRemember(defaultCFF)
        elif ( len(sequence.split('.'))==2 ):
            l=self.loadAndRemember(sequence.split('.')[0])
            sequence=sequence.split('.')[1]
        else:
            print("sub sequence configuration must be of the form dir/subdir/cff.a+b+c or cff.a")
            print(sequence,"not recognized")
            raise
        return l

    def scheduleSequence(self,seq,prefix,what='Path'):
        if '*' in seq:
            # create only one path with all sequences in it
            for i,s in enumerate(seq.split('*')):
                if i==0:
                    setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, s) ))
                else:
                    p=getattr(self.process,prefix)
                    tmp = getattr(self.process, s)
                    if isinstance(tmp, cms.Task):
                        p.associate(tmp)
                    else:
                        p+=tmp
            self.schedule.append(getattr(self.process,prefix))
            return
        else:
            # create as many paths as there are sequences
            if not '+' in seq:
                if self.nextScheduleIsConditional:
                    self.conditionalPaths.append(prefix)
                setattr(self.process,prefix,getattr(cms,what)( getattr(self.process, seq) ))
                self.schedule.append(getattr(self.process,prefix))
            else:
                for i,s in enumerate(seq.split('+')):
                    sn=prefix+'%d'%(i)
                    setattr(self.process,sn,getattr(cms,what)( getattr(self.process, s) ))
                    self.schedule.append(getattr(self.process,sn))
            return
1272 """ Enrich the process with alca streams """
1274 sequence = sequence.split(
'.')[-1]
1277 alcaList = sequence.split(
"+")
1279 from Configuration.AlCa.autoAlca
import autoAlca, AlCaNoConcurrentLumis
1283 for name
in alcaConfig.__dict__:
1284 alcastream = getattr(alcaConfig,name)
1285 shortName = name.replace(
'ALCARECOStream',
'')
1286 if shortName
in alcaList
and isinstance(alcastream,cms.FilteredStream):
1287 if shortName
in AlCaNoConcurrentLumis:
1288 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of AlCa sequence {}".
format(shortName))
1289 self._options.nConcurrentLumis =
"1"
1290 self._options.nConcurrentIOVs =
"1"
1291 output = self.
addExtraStream(name,alcastream, workflow = workflow)
1292 self.
executeAndRemember(
'process.ALCARECOEventContent.outputCommands.extend(process.OutALCARECO'+shortName+
'_noDrop.outputCommands)')
1293 self.AlCaPaths.append(shortName)
1294 if 'DQM' in alcaList:
1295 if not self._options.inlineEventContent
and hasattr(self.
process,name):
1296 self.
executeAndRemember(
'process.' + name +
'.outputCommands.append("keep *_MEtoEDMConverter_*_*")')
1298 output.outputCommands.append(
"keep *_MEtoEDMConverter_*_*")
1301 if self._options.hltProcess
or 'HLT' in self.
stepMap:
1302 if isinstance(alcastream.paths,tuple):
1303 for path
in alcastream.paths:
1308 for i
in range(alcaList.count(shortName)):
1309 alcaList.remove(shortName)
1312 elif name ==
'pathALCARECODQM' and 'DQM' in alcaList:
1313 path = getattr(alcaConfig,name)
1314 self.schedule.append(path)
1315 alcaList.remove(
'DQM')
1317 if isinstance(alcastream,cms.Path):
1319 self.blacklist_paths.append(alcastream)
1322 if len(alcaList) != 0:
1324 for name
in alcaConfig.__dict__:
1325 alcastream = getattr(alcaConfig,name)
1326 if isinstance(alcastream,cms.FilteredStream):
1327 available.append(name.replace(
'ALCARECOStream',
''))
1328 print(
"The following alcas could not be found "+
str(alcaList))
1329 print(
"available ",available)
1331 raise Exception(
"The following alcas could not be found "+
str(alcaList))
    def prepare_LHE(self, sequence = None):
        # load the fragment and make it loadable
        loadFragment = self._options.evt_type.replace('.py','',).replace('.','_').replace('python/','').replace('/','.')
        print("Loading lhe fragment from",loadFragment)
        __import__(loadFragment)
        self.process.load(loadFragment)
        # inline the modules
        self._options.inlineObjets+=','+sequence

        getattr(self.process,sequence).nEvents = int(self._options.number)

        # schedule it
        self.process.lhe_step = cms.Path( getattr( self.process,sequence) )
        self.excludedPaths.append("lhe_step")
        self.schedule.append( self.process.lhe_step )
1351 """ load the fragment of generator configuration """
1356 loadFragment = self._options.evt_type.replace(
'.py',
'',).
replace(
'.',
'_').
replace(
'python/',
'')
1358 if not '/' in loadFragment:
1359 loadFragment=
'Configuration.Generator.'+loadFragment
1361 loadFragment=loadFragment.replace(
'/',
'.')
1363 print(
"Loading generator fragment from",loadFragment)
1364 __import__(loadFragment)
1368 if not (self._options.filein
or self._options.dasquery):
1369 raise Exception(
"Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
1372 from Configuration.Generator.concurrentLumisDisable
import noConcurrentLumiGenerators
1374 generatorModule=sys.modules[loadFragment]
1375 genModules=generatorModule.__dict__
1381 if self._options.hideGen:
1384 self.process.load(loadFragment)
1386 import FWCore.ParameterSet.Modules
as cmstypes
1387 for name
in genModules:
1388 theObject = getattr(generatorModule,name)
1389 if isinstance(theObject, cmstypes._Module):
1390 self._options.inlineObjets=name+
','+self._options.inlineObjets
1391 if theObject.type_()
in noConcurrentLumiGenerators:
1392 print(
"Setting numberOfConcurrentLuminosityBlocks=1 because of generator {}".
format(theObject.type_()))
1393 self._options.nConcurrentLumis =
"1"
1394 self._options.nConcurrentIOVs =
"1"
1395 elif isinstance(theObject, cms.Sequence)
or isinstance(theObject, cmstypes.ESProducer):
1396 self._options.inlineObjets+=
','+name
1398 if sequence == self.
GENDefaultSeq or sequence ==
'pgen_genonly':
1399 if 'ProductionFilterSequence' in genModules
and (
'generator' in genModules):
1401 elif 'generator' in genModules:
1404 """ Enrich the schedule with the rest of the generation step """
1406 genSeqName=sequence.split(
'.')[-1]
1410 from Configuration.StandardSequences.VtxSmeared
import VtxSmeared
1411 cffToBeLoaded=VtxSmeared[self._options.beamspot]
1414 raise Exception(
"VertexSmearing type or beamspot "+self._options.beamspot+
" unknown.")
1416 if self._options.scenario ==
'HeavyIons':
1417 if self._options.pileup==
'HiMixGEN':
1418 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorMix_cff")
1419 elif self._options.pileup==
'HiMixEmbGEN':
1420 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorEmbMix_cff")
1422 self.
loadAndRemember(
"Configuration/StandardSequences/GeneratorHI_cff")
1424 self.process.generation_step = cms.Path( getattr(self.
process,genSeqName) )
1425 self.schedule.append(self.process.generation_step)
1428 self.
executeAndRemember(
'process.genstepfilter.triggerConditions=cms.vstring("generation_step")')
1434 """ Enrich the schedule with the summary of the filter step """
1441 """ Enrich the schedule with the simulation step"""
1443 if not self._options.fast:
1444 if self._options.gflash==
True:
1447 if self._options.magField==
'0T':
1450 if self._options.magField==
'0T':
1451 self.
executeAndRemember(
"process.fastSimProducer.detectorDefinition.magneticFieldZ = cms.untracked.double(0.)")
1457 """ Enrich the schedule with the digitisation step"""
1460 if self._options.gflash==
True:
1461 self.
loadAndRemember(
"Configuration/StandardSequences/GFlashDIGI_cff")
1463 if sequence ==
'pdigi_valid' or sequence ==
'pdigi_hi':
1464 self.
executeAndRemember(
"process.mix.digitizers = cms.PSet(process.theDigitizersValid)")
1466 if sequence !=
'pdigi_nogen' and sequence !=
'pdigi_valid_nogen' and sequence !=
'pdigi_hi_nogen' and not self.process.source.type_()==
'EmptySource' and not self._options.filetype ==
"LHE":
1467 if self._options.inputEventContent==
'':
1468 self._options.inputEventContent=
'REGEN'
1470 self._options.inputEventContent=self._options.inputEventContent+
',REGEN'
1477 """ Enrich the schedule with the crossing frame writer step"""
1483 """ Enrich the schedule with the digitisation step"""
1487 if self._options.pileup_input:
1489 if self._options.pileup_input.startswith(
'dbs:')
or self._options.pileup_input.startswith(
'das:'):
1490 theFiles=
filesFromDASQuery(
'file dataset = %s'%(self._options.pileup_input[4:],),self._options.pileup_dasoption)[0]
1491 elif self._options.pileup_input.startswith(
"filelist:"):
1492 theFiles= (
filesFromList(self._options.pileup_input[9:]))[0]
1494 theFiles=self._options.pileup_input.split(
',')
1496 self.
executeAndRemember(
"process.mixData.input.fileNames = cms.untracked.vstring(%s)"%( theFiles ) )
1511 """ Enrich the schedule with the L1 simulation step"""
1518 """ Enrich the schedule with the L1 simulation step, running the L1 emulator on data unpacked from the RAW collection, and repacking the result in a new RAW collection"""
1519 supported = [
'GT',
'GT1',
'GT2',
'GCTGT',
'Full',
'FullSimTP',
'FullMC',
'Full2015Data',
'uGT',
'CalouGT']
1520 if sequence
in supported:
1521 self.
loadAndRemember(
'Configuration/StandardSequences/SimL1EmulatorRepack_%s_cff'%sequence)
1522 if self._options.scenario ==
'HeavyIons':
1526 print(
"L1REPACK with '",sequence,
"' is not supported! Supported choices are: ",supported)
1530 """ Enrich the schedule with the HLT simulation step"""
1532 print(
"no specification of the hlt menu has been given, should never happen")
1533 raise Exception(
'no HLT sequence provided')
1537 from Configuration.HLT.autoHLT
import autoHLT
1540 sequence = autoHLT[key]
1542 raise ValueError(
'no HLT mapping key "%s" found in autoHLT' % key)
1548 if self._options.scenario ==
'HeavyIons':
1549 optionsForHLT[
'type'] =
'HIon'
1551 optionsForHLT[
'type'] =
'GRun'
1552 optionsForHLTConfig =
', '.
join(
'%s=%s' % (key, repr(val))
for (key, val)
in optionsForHLT.items())
1553 if sequence ==
'run,fromSource':
1554 if hasattr(self.process.source,
'firstRun'):
1555 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.firstRun.value()),%s)'%(optionsForHLTConfig))
1556 elif hasattr(self.process.source,
'setRunNumber'):
1557 self.
executeAndRemember(
'process.loadHltConfiguration("run:%%d"%%(process.source.setRunNumber.value()),%s)'%(optionsForHLTConfig))
1559 raise Exception(
'Cannot replace menu to load %s'%(sequence))
1561 self.
executeAndRemember(
'process.loadHltConfiguration("%s",%s)'%(sequence.replace(
',',
':'),optionsForHLTConfig))
1565 if self._options.isMC:
1566 self._options.customisation_file.append(
"HLTrigger/Configuration/customizeHLTforMC.customizeHLTforMC")
1568 if self._options.name !=
'HLT':
1569 self.additionalCommands.append(
'from HLTrigger.Configuration.CustomConfigs import ProcessName')
1570 self.additionalCommands.append(
'process = ProcessName(process)')
1571 self.additionalCommands.append(
'')
1572 from HLTrigger.Configuration.CustomConfigs
import ProcessName
1575 if self.process.schedule ==
None:
1576 raise Exception(
'the HLT step did not attach a valid schedule to the process')
1579 [self.blacklist_paths.append(path)
for path
in self.process.schedule
if isinstance(path,(cms.Path,cms.EndPath))]
1582 if self._options.fast:
1583 if not hasattr(self.
process,
'HLTEndSequence'):
1584 self.
executeAndRemember(
"process.HLTEndSequence = cms.Sequence( process.dummyModule )")
    def prepare_RAW2RECO(self, sequence = None):
        if ',' in sequence:
            seqReco=sequence.split(',')[1]
            seqDigi=sequence.split(',')[0]
        else:
            print("RAW2RECO requires two specifications",sequence,"insufficient")

        self.prepare_RAW2DIGI(seqDigi)
        self.prepare_RECO(seqReco)
        return

    def prepare_PATFILTER(self, sequence=None):
        self.loadAndRemember("PhysicsTools/PatAlgos/slimming/metFilterPaths_cff")
        from PhysicsTools.PatAlgos.slimming.metFilterPaths_cff import allMetFilterPaths
        for filt in allMetFilterPaths:
            self.schedule.append(getattr(self.process,'Flag_'+filt))
    def prepare_L1HwVal(self, sequence = 'L1HwVal'):
        ''' Enrich the schedule with L1 HW validation '''
        print('\n\n\n DEPRECATED this has no action \n\n\n')
        return

    def prepare_L1Reco(self, sequence = "L1Reco"):
        ''' Enrich the schedule with L1 reconstruction '''

    def prepare_L1TrackTrigger(self, sequence = "L1TrackTrigger"):
        ''' Enrich the schedule with L1 reconstruction '''

    def prepare_FILTER(self, sequence = None):
        ''' Enrich the schedule with a user defined filter sequence '''
        # load the relevant part
        filterConfig=self.load(sequence.split('.')[0])
        filterSeq=sequence.split('.')[-1]
        # print it in the configuration
        class PrintAllModules(object):
            def __init__(self):
                self.inliner=''
            def enter(self,visitee):
                try:
                    label=visitee.label()
                    # needs to be in reverse order
                    self.inliner=label+','+self.inliner
                except:
                    pass
            def leave(self,v): pass

        expander=PrintAllModules()
        getattr(self.process,filterSeq).visit( expander )
        self._options.inlineObjets+=','+expander.inliner
        self._options.inlineObjets+=','+filterSeq
    def prepare_RECO(self, sequence = "reconstruction"):
        ''' Enrich the schedule with reconstruction '''

    def prepare_RECOSIM(self, sequence = "recosim"):
        ''' Enrich the schedule with reconstruction '''

    def prepare_RECOBEFMIX(self, sequence = "reconstruction"):
        ''' Enrich the schedule with the part of reconstruction that is done before mixing in FastSim'''
        if not self._options.fast:
            print("ERROR: this step is only implemented for FastSim")
            sys.exit()
        self.scheduleSequence(sequence.split('.')[-1],'reconstruction_befmix_step')
        return
    def prepare_PAT(self, sequence = "miniAOD"):
        ''' Enrich the schedule with PAT '''
        self.labelsToAssociate.append('patTask')
        if self._options.isData:
            self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllData")
        else:
            if self._options.fast:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMCFastSim")
            else:
                self._options.customisation_file_unsch.insert(0,"PhysicsTools/PatAlgos/slimming/miniAOD_tools.miniAOD_customizeAllMC")

        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.patTrigger.processName = \""+self._options.hltProcess+"\"\n"
            self._options.customise_commands = self._options.customise_commands + "process.slimmedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"
            self._options.customise_commands = self._options.customise_commands + "process.patMuons.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

    def prepare_PATGEN(self, sequence = "miniGEN"):
        ''' Enrich the schedule with PATGEN '''
        self.labelsToAssociate.append('patGENTask')
        if self._options.isData:
            raise Exception("PATGEN step can only run on MC")
    def prepare_NANO(self, sequence = "nanoAOD"):
        ''' Enrich the schedule with NANO '''
        custom = "nanoAOD_customizeData" if self._options.isData else "nanoAOD_customizeMC"
        self._options.customisation_file.insert(0,"PhysicsTools/NanoAOD/nano_cff."+custom)
        if self._options.hltProcess:
            if len(self._options.customise_commands) > 1:
                self._options.customise_commands = self._options.customise_commands + " \n"
            self._options.customise_commands = self._options.customise_commands + "process.unpackedPatTrigger.triggerResults= cms.InputTag( 'TriggerResults::"+self._options.hltProcess+"' )\n"

    def prepare_NANOGEN(self, sequence = "nanoAOD"):
        ''' Enrich the schedule with NANOGEN '''
        fromGen = any([x in self.stepMap for x in ['LHE', 'GEN', 'AOD']])
        custom = "customizeNanoGEN" if fromGen else "customizeNanoGENFromMini"
        if self._options.runUnscheduled:
            self._options.customisation_file_unsch.insert(0, "PhysicsTools/NanoAOD/nanogen_cff."+custom)
    def prepare_EI(self, sequence = None):
        ''' Enrich the schedule with event interpretation '''
        from Configuration.StandardSequences.EventInterpretation import EventInterpretation
        if sequence in EventInterpretation:
            self.EIDefaultCFF = EventInterpretation[sequence]
            sequence = 'EIsequence'
        else:
            raise Exception('Cannot set %s event interpretation'%( sequence) )
    def prepare_SKIM(self, sequence = "all"):
        ''' Enrich the schedule with skimming fragments'''
        skimConfig = self.loadDefaultOrSpecifiedCFF(sequence,self.SKIMDefaultCFF)
        sequence = sequence.split('.')[-1]

        skimlist=sequence.split('+')
        # support @Mu+DiJet+@Electron configuration via autoSkim.py
        from Configuration.Skimming.autoSkim import autoSkim
        self.expandMapping(skimlist,autoSkim)

        for skim in skimConfig.__dict__:
            skimstream = getattr(skimConfig,skim)
            if isinstance(skimstream,cms.Path):
                # black list the skim path so that it does not appear in the cfg
                self.blacklist_paths.append(skimstream)
            if (not isinstance(skimstream,cms.FilteredStream)):
                continue
            shortname = skim.replace('SKIMStream','')
            if (sequence=="all"):
                self.addExtraStream(shortname,skimstream)
            elif (shortname in skimlist):
                self.addExtraStream(shortname,skimstream)
                # add a DQM eventcontent for this stream
                if self._options.datatier=='DQM':
                    self.process.load(self.EVTCONTDefaultCFF)
                    skimstreamDQM = cms.FilteredStream(
                            responsible = skimstream.responsible,
                            name = skimstream.name+'DQM',
                            paths = skimstream.paths,
                            selectEvents = skimstream.selectEvents,
                            content = self._options.datatier+'EventContent',
                            dataTier = cms.untracked.string(self._options.datatier)
                            )
                    self.addExtraStream(shortname+'DQM',skimstreamDQM)
                for i in range(skimlist.count(shortname)):
                    skimlist.remove(shortname)

        if (skimlist.__len__()!=0 and sequence!="all"):
            print('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
            raise Exception('WARNING, possible typo with SKIM:'+'+'.join(skimlist))
    def prepare_USER(self, sequence = None):
        ''' Enrich the schedule with a user defined sequence '''

    def prepare_POSTRECO(self, sequence = None):
        """ Enrich the schedule with the postreco step """

    def prepare_VALIDATION(self, sequence = 'validation'):
        print(sequence,"in preparing validation")
        from Validation.Configuration.autoValidation import autoValidation
        # in case of VALIDATION:something,somethingelse split the pre- and per-event parts
        sequence=sequence.split('.')[-1]
        if sequence.find(',')!=-1:
            prevalSeqName=sequence.split(',')[0].split('+')
            valSeqName=sequence.split(',')[1].split('+')
            self.expandMapping(prevalSeqName,autoValidation,index=0)
            self.expandMapping(valSeqName,autoValidation,index=1)
        else:
            if '@' in sequence:
                prevalSeqName=sequence.split('+')
                valSeqName=sequence.split('+')
                self.expandMapping(prevalSeqName,autoValidation,index=0)
                self.expandMapping(valSeqName,autoValidation,index=1)
            else:
                postfix=''
                if sequence:
                    postfix='_'+sequence
                prevalSeqName=['prevalidation'+postfix]
                valSeqName=['validation'+postfix]
                if not hasattr(self.process,valSeqName[0]):
                    prevalSeqName=['']
                    valSeqName=[sequence]

        def NFI(index):
            # name from index: '' for the first path, the index afterwards
            if index==0:
                return ''
            else:
                return '%s'%index

        # rename the HLT process in validation steps
        if ('HLT' in self.stepMap and not self._options.fast) or self._options.hltProcess:
            for s in valSeqName+prevalSeqName:
                if s:
                    self.renameHLTprocessInSequence(s)

        for (i,s) in enumerate(prevalSeqName):
            if s:
                setattr(self.process,'prevalidation_step%s'%NFI(i), cms.Path( getattr(self.process, s)) )
                self.schedule.append(getattr(self.process,'prevalidation_step%s'%NFI(i)))

        for (i,s) in enumerate(valSeqName):
            setattr(self.process,'validation_step%s'%NFI(i), cms.EndPath( getattr(self.process, s)))
            self.schedule.append(getattr(self.process,'validation_step%s'%NFI(i)))

        if not 'DIGI' in self.stepMap and not self._options.fast and not any(map( lambda s : s.startswith('genvalid'), valSeqName)):
            if self._options.restoreRNDSeeds==False and not self._options.restoreRNDSeeds==True:
                self._options.restoreRNDSeeds=True

        if not 'DIGI' in self.stepMap and not self._options.isData and not self._options.fast:
            self._options.customisation_file.append("SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn")

        if hasattr(self.process,"genstepfilter") and len(self.process.genstepfilter.triggerConditions):
            # will get in the schedule, smoothly
            for (i,s) in enumerate(valSeqName):
                getattr(self.process,'validation_step%s'%NFI(i)).insert(0, self.process.genstepfilter)
1879 """Visitor that travels within a cms.Sequence, looks for a parameter and replace its value
1880 It will climb down within PSets, VPSets and VInputTags to find its target"""
1881 def __init__(self, paramSearch, paramReplace, verbose=False, whitelist=()):
1888 if isinstance(pset, cms._Parameterizable):
1889 for name
in pset.parameters_().
keys():
1895 value = getattr(pset,name)
1896 type = value.pythonTypeName()
1897 if type
in (
'cms.PSet',
'cms.untracked.PSet'):
1898 self.
doIt(value,base+
"."+name)
1899 elif type
in (
'cms.VPSet',
'cms.untracked.VPSet'):
1900 for (i,ps)
in enumerate(value): self.
doIt(ps,
"%s.%s[%d]"%(base,name,i) )
1901 elif type
in (
'cms.string',
'cms.untracked.string'):
1905 elif type
in (
'cms.VInputTag',
'cms.untracked.VInputTag'):
1906 for (i,n)
in enumerate(value):
1907 if not isinstance(n, cms.InputTag):
1914 elif type
in (
'cms.vstring',
'cms.untracked.vstring'):
1915 for (i,n)
in enumerate(value):
1918 elif type
in (
'cms.InputTag',
'cms.untracked.InputTag'):
1921 setattr(getattr(pset, name),
"processName",self.
_paramReplace)
1926 label = visitee.label()
1927 except AttributeError:
1928 label =
'<Module not in a Process>'
1930 label =
'other execption'
1931 self.
doIt(visitee, label)
    def renameInputTagsInSequence(self,sequence,oldT="rawDataCollector",newT="rawDataRepacker"):
        print("Replacing all InputTag %s => %s"%(oldT,newT))
        from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag
        massSearchReplaceAnyInputTag(getattr(self.process,sequence),oldT,newT)
        loadMe='from PhysicsTools.PatAlgos.tools.helpers import massSearchReplaceAnyInputTag'
        self.additionalCommands.append(loadMe)
        self.additionalCommands.append('massSearchReplaceAnyInputTag(process.%s,"%s","%s",False,True)'%(sequence,oldT,newT))

    def renameHLTprocessInSequence(self,sequence,proc=None,HLTprocess='HLT'):
        if self._options.hltProcess:
            proc=self._options.hltProcess
        else:
            proc=self.process.name_()
        if proc==HLTprocess: return
        # look up all modules in the sequence and replace the process name
        print("replacing %s process name - sequence %s will use '%s'" % (HLTprocess,sequence, proc))
        getattr(self.process,sequence).visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor(HLTprocess,proc,whitelist = ("subSystemFolder",)))
        if 'from Configuration.Applications.ConfigBuilder import ConfigBuilder' not in self.additionalCommands:
            self.additionalCommands.append('from Configuration.Applications.ConfigBuilder import ConfigBuilder')
        self.additionalCommands.append('process.%s.visit(ConfigBuilder.MassSearchReplaceProcessNameVisitor("%s", "%s", whitelist = ("subSystemFolder",)))'% (sequence,HLTprocess, proc))
    def expandMapping(self,seqList,mapping,index=None):
        maxLevel=30
        level=0
        while '@' in repr(seqList) and level<maxLevel:
            level+=1
            for specifiedCommand in seqList:
                if specifiedCommand.startswith('@'):
                    location=specifiedCommand[1:]
                    if not location in mapping:
                        raise Exception("Impossible to map "+location+" from "+repr(mapping))
                    mappedTo=mapping[location]
                    if index is not None:
                        mappedTo=mappedTo[index]
                    seqList.remove(specifiedCommand)
                    seqList.extend(mappedTo.split('+'))
                    break
        if level==maxLevel:
            raise Exception("Could not fully expand "+repr(seqList)+" from "+repr(mapping))
# prepare_DQM: turn each requested DQM sequence into a scheduled EndPath
sequenceList = sequence.split('.')[-1].split('+')
postSequenceList = sequence.split('.')[-1].split('+')
from DQMOffline.Configuration.autoDQM import autoDQM

if len(set(sequenceList)) != len(sequenceList):
    sequenceList = list(set(sequenceList))
    print("Duplicate entries for DQM, using", sequenceList)

pathName = 'dqmoffline_step'
for (i, sequence) in enumerate(sequenceList):
    if i != 0:
        pathName = 'dqmoffline_%d_step' % (i)

    if 'HLT' in self.stepMap.keys() or self._options.hltProcess:
        ...

    setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
    self.schedule.append(getattr(self.process, pathName))

    if hasattr(self.process, "genstepfilter") and len(self.process.genstepfilter.triggerConditions):
        getattr(self.process, pathName).insert(0, self.process.genstepfilter)

pathName = 'dqmofflineOnPAT_step'
for (i, sequence) in enumerate(postSequenceList):
    # nothing to do if the post sequence is identical to the main DQM sequence
    if (sequenceList[i] == postSequenceList[i]):
        continue
    if i != 0:
        pathName = 'dqmofflineOnPAT_%d_step' % (i)
    setattr(self.process, pathName, cms.EndPath(getattr(self.process, sequence)))
    self.schedule.append(getattr(self.process, pathName))
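# --- Illustrative sketch (invented labels) of what the loops above produce: each DQM
# --- sequence ends up wrapped in an EndPath named 'dqmoffline_step', 'dqmoffline_1_step',
# --- ... and appended to the schedule.
import FWCore.ParameterSet.Config as cms

process = cms.Process("DQM")
process.toyDQMAnalyzer = cms.EDAnalyzer("HypotheticalDQMAnalyzer")
process.DQMOfflineToy = cms.Sequence(process.toyDQMAnalyzer)
process.dqmoffline_step = cms.EndPath(process.DQMOfflineToy)
process.schedule = cms.Schedule(process.dqmoffline_step)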
2024 """ Enrich the process with harvesting step """
2025 self.
DQMSaverCFF=
'Configuration/StandardSequences/DQMSaver'+self._options.harvesting+
'_cff'
2029 sequence = sequence.split(
'.')[-1]
2032 harvestingList = sequence.split(
"+")
2033 from DQMOffline.Configuration.autoDQM
import autoDQM
2034 from Validation.Configuration.autoValidation
import autoValidation
2036 combined_mapping = copy.deepcopy( autoDQM )
2037 combined_mapping.update( autoValidation )
2038 self.
expandMapping(harvestingList,combined_mapping,index=-1)
2040 if len(set(harvestingList))!=len(harvestingList):
2041 harvestingList=list(set(harvestingList))
2042 print(
"Duplicate entries for HARVESTING, using",harvestingList)
2044 for name
in harvestingList:
2045 if not name
in harvestingConfig.__dict__:
2046 print(name,
"is not a possible harvesting type. Available are",harvestingConfig.__dict__.keys())
2050 harvestingstream = getattr(harvestingConfig,name)
2051 if isinstance(harvestingstream,cms.Path):
2052 self.schedule.append(harvestingstream)
2053 self.blacklist_paths.append(harvestingstream)
2054 if isinstance(harvestingstream,cms.Sequence):
2055 setattr(self.
process,name+
"_step",cms.Path(harvestingstream))
2056 self.schedule.append(getattr(self.
process,name+
"_step"))
2062 """ Enrich the process with AlCaHarvesting step """
2064 sequence=sequence.split(
".")[-1]
2067 harvestingList = sequence.split(
"+")
2071 from Configuration.AlCa.autoPCL
import autoPCL
2074 for name
in harvestingConfig.__dict__:
2075 harvestingstream = getattr(harvestingConfig,name)
2076 if name
in harvestingList
and isinstance(harvestingstream,cms.Path):
2077 self.schedule.append(harvestingstream)
2078 if isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_dbOutput"), cms.VPSet)
and \
2079 isinstance(getattr(harvestingConfig,
"ALCAHARVEST" + name +
"_metadata"), cms.VPSet):
2080 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.extend(process.ALCAHARVEST" + name +
"_dbOutput)")
2081 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.extend(process.ALCAHARVEST" + name +
"_metadata)")
2083 self.
executeAndRemember(
"process.PoolDBOutputService.toPut.append(process.ALCAHARVEST" + name +
"_dbOutput)")
2084 self.
executeAndRemember(
"process.pclMetadataWriter.recordsToMap.append(process.ALCAHARVEST" + name +
"_metadata)")
2085 harvestingList.remove(name)
2087 lastStep = getattr(harvestingConfig,
"ALCAHARVESTDQMSaveAndMetadataWriter")
2088 self.schedule.append(lastStep)
2090 if len(harvestingList) != 0
and 'dummyHarvesting' not in harvestingList :
2091 print(
"The following harvesting could not be found : ", harvestingList)
2092 raise Exception(
"The following harvesting could not be found : "+
str(harvestingList))
# FastSim: use the Famos-based reconstruction sequence as the reconstruction path
self.process.reconstruction = cms.Path(self.process.reconstructionWithFamos)
self.schedule.append(self.process.reconstruction)
2107 """ Add useful info for the production. """
2108 self.process.configurationMetadata=cms.untracked.PSet\
2109 (version=cms.untracked.string(
"$Revision: 1.19 $"),
2110 name=cms.untracked.string(
"Applications"),
2111 annotation=cms.untracked.string(evt_type+
" nevts:"+
str(evtnumber))
2114 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
self.pythonCfgCode += "# using: \n# " + __version__[1:-1] + "\n# " + __source__[1:-1] + '\n'
self.pythonCfgCode += "# with command line options: " + self._options.arguments + '\n'
self.pythonCfgCode += "import FWCore.ParameterSet.Config as cms\n\n"
# collect the era and process modifiers requested on the command line
if hasattr(self._options, "era") and self._options.era:
    from Configuration.StandardSequences.Eras import eras
    for requestedEra in self._options.era.split(","):
        modifierStrings.append(requestedEra)
        modifierImports.append(eras.pythonCfgLines[requestedEra])
        modifiers.append(getattr(eras, requestedEra))

if hasattr(self._options, "procModifiers") and self._options.procModifiers:
    for c in self._options.procModifiers:
        thingsImported.extend(c.split(","))
    for pm in thingsImported:
        modifierStrings.append(pm)
        modifierImports.append('from Configuration.ProcessModifiers.' + pm + '_cff import ' + pm)
        modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.' + pm + '_cff'), pm))

self.pythonCfgCode += "process = cms.Process('" + self._options.name + "'"
if len(modifierStrings) > 0:
    ...

if len(modifiers) > 0:
    self.process = cms.Process(self._options.name, *modifiers)
else:
    self.process = cms.Process(self._options.name)
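# --- Illustrative sketch of the two construction branches above.  The era (Run2_2018) and
# --- the process modifier (premix_stage2) are example choices, not ConfigBuilder defaults.
import importlib
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras

modifiers = [getattr(eras, 'Run2_2018')]
modifiers.append(getattr(importlib.import_module('Configuration.ProcessModifiers.premix_stage2_cff'), 'premix_stage2'))
# with modifiers the process is built as cms.Process(name, *modifiers), otherwise plain
process = cms.Process('DIGI', *modifiers) if modifiers else cms.Process('DIGI')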
2167 """ Prepare the configuration string and add missing pieces."""
2179 outputModuleCfgCode=
""
2180 if not 'HARVESTING' in self.stepMap.keys()
and not 'ALCAHARVEST' in self.stepMap.keys()
and not 'ALCAOUTPUT' in self.stepMap.keys()
and self.
with_output:
2185 self.
pythonCfgCode +=
"# import of standard configurations\n"
2190 if not hasattr(self.
process,
"configurationMetadata"):
2194 self.addedObjects.append((
"Production Info",
"configurationMetadata"))
# give each additional output module its own EndPath and schedule it
nl = sorted(self.additionalOutputs.keys())
for name in nl:
    output = self.additionalOutputs[name]
    self.pythonCfgCode += "process.%s = %s" % (name, output.dumpPython())
    tmpOut = cms.EndPath(output)
    setattr(self.process, name + 'OutPath', tmpOut)
    self.schedule.append(tmpOut)
for object in self._options.inlineObjets.split(','):
    ...
    if not hasattr(self.process, object):
        print('cannot inline -' + object + '- : not known')
    ...

if self._options.pileup == 'HiMixEmbGEN':
    self.pythonCfgCode += "\nprocess.generator.embeddingMode=cms.bool(True)\n"
# dump the paths and endpaths, then assemble the schedule
for path in self.process.paths:
    ...
for endpath in self.process.endpaths:
    ...

pathNames = ['process.' + p.label_() for p in self.schedule]
if self.process.schedule == None:
    self.process.schedule = cms.Schedule()
    for item in self.schedule:
        self.process.schedule.append(item)
    result = 'process.schedule = cms.Schedule(' + ','.join(pathNames) + ')\n'
else:
    # the schedule was imported from a cff in HLTrigger.Configuration
    if not isinstance(self.scheduleIndexOfFirstHLTPath, int):
        raise Exception('the schedule was imported from a cff in HLTrigger.Configuration, but the final index of the first HLT path is undefined')

    for index, item in enumerate(self.schedule):
        if index < self.scheduleIndexOfFirstHLTPath:
            self.process.schedule.insert(index, item)
        else:
            self.process.schedule.append(item)

    result = "# process.schedule imported from cff in HLTrigger.Configuration\n"
    for index, item in enumerate(pathNames[:self.scheduleIndexOfFirstHLTPath]):
        result += 'process.schedule.insert(' + str(index) + ', ' + item + ')\n'
self.process.schedule.associate(getattr(self.process, labelToAssociate))
self.pythonCfgCode += 'process.schedule.associate(process.' + labelToAssociate + ')\n'
self.pythonCfgCode += "from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask\n"
if self._options.nThreads != "1":
    self.pythonCfgCode += "process.options.numberOfThreads = " + self._options.nThreads + "\n"
    self.pythonCfgCode += "process.options.numberOfStreams = " + self._options.nStreams + "\n"
    self.pythonCfgCode += "process.options.numberOfConcurrentLuminosityBlocks = " + self._options.nConcurrentLumis + "\n"
    self.pythonCfgCode += "process.options.eventSetup.numberOfConcurrentIOVs = " + self._options.nConcurrentIOVs + "\n"
    if int(self._options.nConcurrentLumis) > 1:
        self.pythonCfgCode += "if hasattr(process, 'DQMStore'): process.DQMStore.assertLegacySafe=cms.untracked.bool(False)\n"
    self.process.options.numberOfThreads = int(self._options.nThreads)
    self.process.options.numberOfStreams = int(self._options.nStreams)
    self.process.options.numberOfConcurrentLuminosityBlocks = int(self._options.nConcurrentLumis)
    self.process.options.eventSetup.numberOfConcurrentIOVs = int(self._options.nConcurrentIOVs)
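# --- Illustrative sketch of the lines that the pythonCfgCode above emits into the dumped
# --- configuration, here for an assumed '--nThreads 4' job (all numbers are examples):
# process.options.numberOfThreads = 4
# process.options.numberOfStreams = 0
# process.options.numberOfConcurrentLuminosityBlocks = 1
# process.options.eventSetup.numberOfConcurrentIOVs = 1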
if self._options.isRepacked:
    self.pythonCfgCode += "from Configuration.Applications.ConfigBuilder import MassReplaceInputTag\n"
    self.pythonCfgCode += "MassReplaceInputTag(process, new=\"rawDataMapperByLabel\", old=\"rawDataCollector\")\n"
    MassReplaceInputTag(self.process, new="rawDataMapperByLabel", old="rawDataCollector")
# prepend the production filter sequence to all paths
self.pythonCfgCode += '# filter all path with the production filter sequence\n'
for path in self.process.paths:
    ...

if self._options.runUnscheduled:
    print("--runUnscheduled is deprecated and not necessary anymore, and will be removed soon. Please update your command line.")
if hasattr(self.process, "logErrorHarvester"):
    self.pythonCfgCode += "\n#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule\n"
    self.pythonCfgCode += "from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands\n"
    self.pythonCfgCode += "process = customiseLogErrorHarvesterUsingOutputCommands(process)\n"
    from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
    ...
self.pythonCfgCode += "\n# Add early deletion of temporary data products to reduce peak memory need\n"
self.pythonCfgCode += "from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete\n"
self.pythonCfgCode += "process = customiseEarlyDelete(process)\n"
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
...
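# --- The customisation calls that the generated configuration ends up containing,
# --- mirroring the strings appended to pythonCfgCode above:
# from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
# process = customiseLogErrorHarvesterUsingOutputCommands(process)
# from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
# process = customiseEarlyDelete(process)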
# insert any special imports registered by dumped objects right after the FWCore import
imports = cms.specialImportRegistry.getSpecialImports()
if len(imports) > 0:
    index = self.pythonCfgCode.find("import FWCore.ParameterSet.Config")
    index = self.pythonCfgCode.find("\n", index)
    ...
# if asked (--io), write a small JSON summary of the job I/O
if self._options.io:
    if not self._options.io.endswith('.io'):
        self._options.io += '.io'
    io = open(self._options.io, 'w')

    ioJson = {}
    if hasattr(self.process.source, "fileNames"):
        if len(self.process.source.fileNames.value()):
            ioJson['primary'] = self.process.source.fileNames.value()
    if hasattr(self.process.source, "secondaryFileNames"):
        if len(self.process.source.secondaryFileNames.value()):
            ioJson['secondary'] = self.process.source.secondaryFileNames.value()
    if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
        ioJson['pileup'] = self._options.pileup_input[4:]
    for (o, om) in self.process.outputModules_().items():
        ioJson[o] = om.fileName.value()
    ioJson['GT'] = self.process.GlobalTag.globaltag.value()
    ...
    io.write(json.dumps(ioJson))
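# --- Illustrative sketch of the kind of summary the '--io' option writes; every value
# --- below (file names, dataset, output module label, global tag) is invented.
import json

ioJson = {
    'primary': ['/store/relval/CMSSW_X_Y_Z/RelValTTbar/GEN-SIM-DIGI-RAW/file.root'],
    'pileup': '/RelValMinBias_14TeV/CMSSW_X_Y_Z/PREMIX',
    'RECOSIMoutput': 'output.root',
    'GT': 'auto:phase1_2022_realistic',
}
with open('step3.io', 'w') as io:
    io.write(json.dumps(ioJson))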