Scenario supporting proton collisions data scouting

Really against OO principles, but pragmatism should prevail, I guess.
from Configuration.DataProcessing.Reco import Reco
from Configuration.DataProcessing.Utils import stepALCAPRODUCER,addMonitoring,dictIO,dqmIOSource,harvestingMode,dqmSeq
import FWCore.ParameterSet.Config as cms
from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder
from Configuration.PyReleaseValidation.ConfigBuilder import Options
from Configuration.PyReleaseValidation.ConfigBuilder import defaultOptions
from Configuration.PyReleaseValidation.ConfigBuilder import installFilteredStream
from Configuration.DataProcessing.RecoTLR import customisePrompt,customiseExpress
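The class declaration itself is not part of this excerpt. As a rough orientation sketch, the methods below are assumed to hang off a scenario class derived from Reco; the name DataScouting is inferred from the process name used later and is not confirmed here:

class DataScouting(Reco):
    # self.cbSc, referenced below, is assumed to hold the ConfigBuilder scenario label (e.g. 'pp').
    ...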
Implement configuration building for data processing for proton collision data taking.
Collision data, data scouting (dst stream).
This method provides the skeleton process for the dataScouting.
I follow the structure of the package. (A usage sketch follows this method.)
options = Options()                                 # creation implied by the Options import above
options.__dict__.update(defaultOptions.__dict__)    # start from the defaults...
options.scenario = self.cbSc                        # ...then set the scenario so it is not clobbered
options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
options.conditions = globalTag
process = cms.Process('DataScouting')
cb = ConfigBuilder(options, process = process, with_output = True)
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring()
)
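A minimal usage sketch for promptReco, assuming the method finishes by calling cb.prepare() and returning the process (those lines are not shown in this excerpt); the module path, global tag and input file are illustrative placeholders:

import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.Impl.DataScouting import DataScouting  # assumed location

scenario = DataScouting()
process = scenario.promptReco('GLOBALTAG::All')   # output-related keyword args omitted here
process.source.fileNames = cms.untracked.vstring('file:input_RAW.root')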
return cms.Process('Empty')
In this scheme this method does not make any sense, but I have to override the Reco one.
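A sketch of what such a forced override can look like; the method name and signature are assumptions based on the Reco interface, and the body simply hands back an empty process as above:

def expressProcessing(self, globalTag, **args):
    # Not meaningful for data scouting; present only because Reco declares it.
    return cms.Process('Empty')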
Proton collisions data taking DQM Harvesting
options = defaultOptions
options.scenario = self.cbSc
# dqmSeq appends any explicitly requested DQM sequences; a sketch follows this method
options.step = "HARVESTING" + dqmSeq(args, ':DQMOffline')
options.name = "EDMtoMEConvert"
options.conditions = globalTag
process = cms.Process("HARVESTING")
configBuilder = ConfigBuilder(options, process = process)   # construction implied; only the prepare() call appears in this excerpt
configBuilder.prepare()
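The step string above is assembled by the dqmSeq helper imported earlier. A rough sketch of its assumed behaviour (the real implementation lives in Configuration.DataProcessing.Utils and may differ):

def dqmSeq(args, default):
    # Use explicitly requested DQM sequences if the caller passed any, otherwise fall back.
    if 'dqmSeq' in args and len(args['dqmSeq']) != 0:
        return ':' + '+'.join(args['dqmSeq'])
    return default

# "HARVESTING" + dqmSeq({}, ':DQMOffline')                     -> 'HARVESTING:DQMOffline'
# "HARVESTING" + dqmSeq({'dqmSeq': ['a', 'b']}, ':DQMOffline') -> 'HARVESTING:a+b'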
Again the same thing: this method is not meaningful here, but the Reco one has to be overridden.