5 Scenario supporting proton collisions data scouting 7 Really against OO principles, but pragmatism should prevail, I guess. 14 from Configuration.DataProcessing.Utils
import stepALCAPRODUCER,addMonitoring,dictIO,dqmIOSource,harvestingMode,dqmSeq,gtNameAndConnect
15 import FWCore.ParameterSet.Config
as cms
16 from Configuration.DataProcessing.RecoTLR
import customisePrompt,customiseExpress
def __init__(self):
    """Initialize the data-scouting scenario via the Scenario base class."""
    Scenario.__init__(self)
def promptReco(self, globalTag, **args):
    """
    _promptReco_

    Implement configuration building for data processing for proton
    collision data, data scouting (dst stream).

    This method provides the skeleton process for the dataScouting;
    it follows the structure of the package.

    :param globalTag: name of the conditions global tag to use.
    :param args: extra keyword options (I/O file lists, conditions
        overrides) consumed by ``dictIO``/``gtNameAndConnect``.
    :return: the configured ``cms.Process``.
    """
    # NOTE(review): this block was reconstructed from a garbled extraction;
    # statements not literally visible in the fragment are flagged below —
    # confirm against the upstream Configuration/DataProcessing package.
    options = Options()  # NOTE(review): inferred — the fragment shows options.__dict__ being populated
    options.__dict__.update(defaultOptions.__dict__)
    # Assign the scenario AFTER the defaults update, otherwise the update
    # would clobber it (the fragment showed these two lines in the
    # clobbering order, presumably an extraction artifact).
    options.scenario = 'pp'
    options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
    dictIO(options, args)  # NOTE(review): inferred from the import list — confirm
    options.conditions = gtNameAndConnect(globalTag, args)  # NOTE(review): inferred — confirm

    process = cms.Process('DataScouting', self.eras)
    cb = ConfigBuilder(options, process=process, with_output=True)

    # Input source: the file list is left empty here and filled in by the caller.
    process.source = cms.Source("PoolSource",
                                fileNames=cms.untracked.vstring())

    cb.prepare()  # NOTE(review): inferred — mirrors dqmHarvesting's configBuilder.prepare()
    return process
def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
    """
    _dqmHarvesting_

    Proton collisions data taking DQM harvesting.

    :param datasetName: name of the dataset being harvested.
    :param runNumber: run number being harvested.
    :param globalTag: name of the conditions global tag to use.
    :param args: extra keyword options (DQM sequence selection, input
        source, conditions overrides).
    :return: the configured ``cms.Process``.
    """
    # NOTE(review): reconstructed from a garbled extraction; statements not
    # literally visible in the fragment are flagged below — confirm against
    # the upstream Configuration/DataProcessing package.
    options = defaultOptions
    options.scenario = 'pp'
    options.step = "HARVESTING" + dqmSeq(args, ':DQMOffline')
    options.name = "EDMtoMEConvert"
    options.conditions = gtNameAndConnect(globalTag, args)  # NOTE(review): inferred — confirm

    process = cms.Process("HARVESTING", self.eras)
    process.source = dqmIOSource(args)  # NOTE(review): inferred from the import list — confirm
    configBuilder = ConfigBuilder(options, process=process)
    configBuilder.prepare()

    # Customise the process for this particular dataset.
    harvestingMode(process, datasetName, args)  # NOTE(review): inferred — confirm rANDl flag
    return process
def promptReco(self, globalTag, args)
def harvestingMode(process, datasetName, args, rANDl=True)
def dqmSeq(args, default)
def gtNameAndConnect(globalTag, args)
def dictIO(options, args)
def dqmHarvesting(self, datasetName, runNumber, globalTag, args)