CMS 3D CMS Logo

/data/refman/pasoursint/CMSSW_5_3_10_patch2/src/Configuration/DataProcessing/python/Impl/DataScouting.py

Go to the documentation of this file.
00001 #!/usr/bin/env python
00002 """
00003 _DataScouting_
00004 
00005 Scenario supporting proton collisions data scouting
00006 Inheriting to reco.
00007 Really against OO principles, but pragmatism should prevail, I guess.
00008 """
00009 
00010 import os
00011 import sys
00012 
00013 from Configuration.DataProcessing.Reco import Reco
00014 
00015 from Configuration.DataProcessing.Utils import stepALCAPRODUCER,addMonitoring,dictIO,dqmIOSource,harvestingMode,dqmSeq
00016 import FWCore.ParameterSet.Config as cms
00017 from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder
00018 from Configuration.PyReleaseValidation.ConfigBuilder import Options
00019 from Configuration.PyReleaseValidation.ConfigBuilder import defaultOptions
00020 from Configuration.PyReleaseValidation.ConfigBuilder import installFilteredStream
00021 from Configuration.DataProcessing.RecoTLR import customisePrompt,customiseExpress
00022 
class DataScouting(Reco):
    """
    _DataScouting_

    Implement configuration building for data processing for proton
    collision data taking, restricted to the data-scouting (dst) stream.

    Inherits from Reco only for pragmatic reuse: promptReco and
    dqmHarvesting build real configurations, while the remaining Reco
    entry points are overridden to return an empty process because they
    are meaningless for the scouting stream.
    """

    def promptReco(self, globalTag, **args):
        """
        _promptReco_

        Collision data, data scouting (dst stream).
        This method provides the skeleton process for the dataScouting.
        dpiparo 17-7-2012
        I follow the structure of the package.

        globalTag -- conditions global tag used for the job.
        args      -- extra settings consumed by dictIO (I/O configuration).

        Returns a configured cms.Process named 'DataScouting'.
        """
        options = Options()
        # Seed with the defaults FIRST, then override: assigning
        # options.scenario before the update (as the code previously did)
        # let defaultOptions.__dict__ clobber the scenario value.
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        options.step = 'DQM:DQM/DataScouting/dataScouting_cff.dataScoutingDQMSequence,ENDJOB'
        dictIO(options, args)
        options.conditions = globalTag

        process = cms.Process('DataScouting')
        cb = ConfigBuilder(options, process=process, with_output=True)

        # Input source: the file list is filled in later by the
        # production system, hence the empty vstring.
        process.source = cms.Source("PoolSource",
            fileNames=cms.untracked.vstring()
        )
        cb.prepare()

        return process

    def __getEmptyProcess(self):
        # Shared helper for all the entry points that are not applicable
        # to the scouting stream but must still return a process object.
        return cms.Process('Empty')

    def expressProcessing(self, globalTag, **args):
        """
        _expressProcessing_

        In this scheme this method does not make any sense, but I have to
        override the Reco one.

        """
        return self.__getEmptyProcess()

    def alcaSkim(self, skims, **args):
        """
        _alcaSkim_

        Same as above

        """
        return self.__getEmptyProcess()

    def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
        """
        _dqmHarvesting_

        Proton collisions data taking DQM Harvesting

        datasetName -- primary dataset name used to steer harvesting mode.
        runNumber   -- run being harvested (kept for interface compatibility).
        globalTag   -- conditions global tag used for the job.
        args        -- extra settings consumed by dqmIOSource/dqmSeq/harvestingMode.

        Returns a configured cms.Process named 'HARVESTING'.
        """
        # Work on a private copy of the defaults: the previous code aliased
        # the shared module-level defaultOptions object and mutated it,
        # leaking settings between successive calls.
        options = Options()
        options.__dict__.update(defaultOptions.__dict__)
        options.scenario = self.cbSc
        options.step = "HARVESTING" + dqmSeq(args, ':DQMOffline')
        options.name = "EDMtoMEConvert"
        options.conditions = globalTag

        process = cms.Process("HARVESTING")
        process.source = dqmIOSource(args)
        configBuilder = ConfigBuilder(options, process=process)
        configBuilder.prepare()

        harvestingMode(process, datasetName, args, rANDl=False)
        return process

    def alcaHarvesting(self, globalTag, datasetName, **args):
        """
        _alcaHarvesting_

        Again the same thing.

        """
        return self.__getEmptyProcess()
00113