
/data/refman/pasoursint/CMSSW_5_3_4/src/Configuration/DataProcessing/python/Impl/Test.py

#!/usr/bin/env python
"""
_Test_

Test Scenario implementation for unit tests/development purposes

Not for use with data taking

"""


from Configuration.DataProcessing.Scenario import Scenario
# ConfigBuilder and defaultOptions are used by dqmHarvesting below; this import
# path is assumed for this release (later releases provide them from
# Configuration.Applications.ConfigBuilder)
from Configuration.PyReleaseValidation.ConfigBuilder import ConfigBuilder, defaultOptions
import FWCore.ParameterSet.Config as cms

class Test(Scenario):
    """
    _Test_

    Test Scenario

    """


    def promptReco(self, globalTag):
        """
        _promptReco_

        Returns skeleton process object

        """
        return cms.Process("RECO")


    def expressProcessing(self, globalTag):
        """
        _expressProcessing_

        Returns skeleton process object

        """
        return cms.Process("Express")


    def alcaSkim(self, skims):
        """
        _alcaSkim_

        Returns skeleton process object

        """
        return cms.Process("ALCARECO")


    def dqmHarvesting(self, datasetName, runNumber, globalTag, **args):
        """
        _dqmHarvesting_

        Build a DQM harvesting configuration.

        This method can be used to test an extra scenario: all of the
        ConfigBuilder options can be overridden through **args, which is
        useful for testing with real jobs.

        Arguments:

        datasetName - the workflow name for the DQM server, i.e. the name
        of the dataset containing the harvested run
        runNumber - the run being harvested
        globalTag - the global tag being used
        inputFiles - the list of LFNs being harvested

        """
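        # Take the shared cmsDriver defaultOptions object and fill in the
        # harvesting-specific settings before handing it to ConfigBuilder.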
        options = defaultOptions
        options.scenario = "cosmics"
        options.step = "HARVESTING:dqmHarvesting"
        options.isMC = False
        options.isData = True
        options.beamspot = None
        options.eventcontent = None
        options.name = "EDMtoMEConvert"
        options.conditions = "FrontierConditions_GlobalTag,%s" % globalTag
        options.arguments = ""
        options.evt_type = ""
        options.filein = []

        options.__dict__.update(args)

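        # Build the HARVESTING process from these options using ConfigBuilder.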
        process = cms.Process("HARVESTING")
        process.source = cms.Source("PoolSource")
        configBuilder = ConfigBuilder(options, process = process)
        configBuilder.prepare()

        #
        # customise process for particular job
        #
        process.source.processingMode = cms.untracked.string('RunsAndLumis')
        process.source.fileNames = cms.untracked(cms.vstring())
        process.maxEvents.input = -1
        process.dqmSaver.workflow = datasetName
        if args.get('saveByLumiSection', ''):
            process.dqmSaver.saveByLumiSection = int(args['saveByLumiSection'])
        if args.get('referenceFile', ''):
            process.DQMStore.referenceFileName = \
                cms.untracked.string(args['referenceFile'])

        return process


    def skimming(self, *skims):
        """
        _skimming_

        Returns skeleton process object

        """
        return cms.Process("Skimming")
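
A minimal usage sketch, assuming the standard getScenario helper from
Configuration.DataProcessing.GetScenario; the global tag, dataset name, run
number, and saveByLumiSection value below are illustrative placeholders:

from Configuration.DataProcessing.GetScenario import getScenario

scenario = getScenario("Test")

# Skeleton reconstruction process (an empty cms.Process("RECO"))
recoProcess = scenario.promptReco("GLOBALTAG::All")

# DQM harvesting configuration; extra keyword arguments override the
# corresponding ConfigBuilder options
harvestProcess = scenario.dqmHarvesting(
    "/MinimumBias/Run2012A-Test/DQM",   # dataset / DQM workflow name (placeholder)
    123456,                             # run number (placeholder)
    "GLOBALTAG::All",                   # global tag (placeholder)
    saveByLumiSection=1)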