CMS 3D CMS Logo

All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Groups Pages
Classes | Namespaces | Functions | Variables
submitPVValidationJobs.py File Reference

Go to the source code of this file.

Classes

class  submitPVValidationJobs.BetterConfigParser
 — Classes —############################ More...
 
class  submitPVValidationJobs.Job
 Auxiliary generator function to split lists into sub-lists, based on http://sandrotosi.blogspot.com/2011/04/python-group-list-in-sub-lists-of-n.html. For background on generators, see also http://stackoverflow.com/questions/231767/the-python-yield-keyword-explained. More...
 
class  submitPVValidationJobs.RefitType
 

Namespaces

 submitPVValidationJobs
 

Functions

def submitPVValidationJobs.__del__
 
def submitPVValidationJobs.__init__
 
def submitPVValidationJobs.as_dict
 
def submitPVValidationJobs.check_proxy
 
def submitPVValidationJobs.ConfigSectionMap
 
def submitPVValidationJobs.createTheBashFile
 
def submitPVValidationJobs.createTheCfgFile
 
def submitPVValidationJobs.createTheLSFFile
 
def submitPVValidationJobs.forward_proxy
 
def submitPVValidationJobs.getBatchjobId
 
def submitPVValidationJobs.getCommandOutput
 
def submitPVValidationJobs.getFilesForRun
 
def submitPVValidationJobs.getLuminosity
 
def submitPVValidationJobs.getNEvents
 
def submitPVValidationJobs.getOutputBaseName
 
def submitPVValidationJobs.getOutputBaseNameWithData
 
def submitPVValidationJobs.getOutputFileName
 
def submitPVValidationJobs.isInJSON
 
def submitPVValidationJobs.main
 
def submitPVValidationJobs.mkdir_eos
 Method to recursively create directories on EOS. ############# More...
 
def submitPVValidationJobs.setEOSout
 
def submitPVValidationJobs.split
 
def submitPVValidationJobs.submit
 
def submitPVValidationJobs.to_bool
 
def submitPVValidationJobs.updateDB
 
def submitPVValidationJobs.updateDB2
 
def submitPVValidationJobs.write_HTCondor_submit_file
 

Variables

string submitPVValidationJobs.__author__ = 'Marco Musich'
 
string submitPVValidationJobs.__copyright__ = 'Copyright 2020, CERN CMS'
 
list submitPVValidationJobs.__credits__ = ['Ernesto Migliore', 'Salvatore Di Guida']
 
string submitPVValidationJobs.__email__ = 'marco.musich@cern.ch'
 
string submitPVValidationJobs.__license__ = 'Unknown'
 
string submitPVValidationJobs.__maintainer__ = 'Marco Musich'
 
int submitPVValidationJobs.__version__ = 1
 
tuple submitPVValidationJobs.aJob
 
list submitPVValidationJobs.alignmentDB = []
 
list submitPVValidationJobs.alignmentTAG = []
 
list submitPVValidationJobs.allFromGT = []
 
tuple submitPVValidationJobs.AnalysisStep_dir = os.path.join(input_CMSSW_BASE,"src/Alignment/OfflineValidation/test")
 
list submitPVValidationJobs.apeDB = []
 
list submitPVValidationJobs.apeTAG = []
 
list submitPVValidationJobs.applyBOWS = []
 
list submitPVValidationJobs.applyEXTRACOND = []
 
list submitPVValidationJobs.applyruncontrol = []
 
list submitPVValidationJobs.batchJobIds = []
 Declare here the list of jobs to wait for. More...
 
list submitPVValidationJobs.bowDB = []
 
list submitPVValidationJobs.bowTAG = []
 
tuple submitPVValidationJobs.child = os.popen(command)
 
tuple submitPVValidationJobs.child1 = os.system(submitcommand1)
 
string submitPVValidationJobs.cmd = 'dasgoclient -limit=0 -query \'run dataset='
 
string submitPVValidationJobs.cmd2 = ' dasgoclient -limit=0 -query \'file run='
 
list submitPVValidationJobs.conditions = []
 
tuple submitPVValidationJobs.config = BetterConfigParser()
 
string submitPVValidationJobs.config_txt = '\n\n'
 
 submitPVValidationJobs.ConfigFile = opts.inputconfig
 
string submitPVValidationJobs.CopyRights = '##################################\n'
 
tuple submitPVValidationJobs.count = pool.map(getFilesForRun,zip(mytuple,instances))
 
tuple submitPVValidationJobs.data = child.read()
 
string submitPVValidationJobs.dbName = "runInfo.pkl"
 
string submitPVValidationJobs.desc = """This is a description of %prog."""
 
dictionary submitPVValidationJobs.dictionary = {}
 
list submitPVValidationJobs.doRunBased = []
 
tuple submitPVValidationJobs.eosdir = os.path.join("/store/group/alca_trackeralign",USER,"test_out",t)
 
tuple submitPVValidationJobs.err = child.close()
 
list submitPVValidationJobs.extraCondVect = []
 
tuple submitPVValidationJobs.file_info = dict(zip(listOfRuns, count))
 
tuple submitPVValidationJobs.fin = open(template_cfg_file)
 
string submitPVValidationJobs.flavour = "tomorrow"
 
tuple submitPVValidationJobs.fout = open(os.path.join(self.cfg_dir,self.outputCfgName),'w')
 
list submitPVValidationJobs.gt = []
 
tuple submitPVValidationJobs.hadd_script_file = os.path.join(scripts_dir,jobName[iConf]+"_"+opts.taskname+".sh")
 
string submitPVValidationJobs.harvest_conditions = '"'
 
tuple submitPVValidationJobs.HOME = os.environ.get('HOME')
 First check that there is a valid grid proxy. More...
 
dictionary submitPVValidationJobs.infos = {}
 
tuple submitPVValidationJobs.input_CMSSW_BASE = os.environ.get('CMSSW_BASE')
 
tuple submitPVValidationJobs.inputDict = as_dict(config)
 
list submitPVValidationJobs.inputFiles = []
 
list submitPVValidationJobs.instances = [opts.instance for entry in mytuple]
 
list submitPVValidationJobs.isDA = []
 
list submitPVValidationJobs.isMC = []
 
 submitPVValidationJobs.job_name = self.output_full_name
 
tuple submitPVValidationJobs.job_submit_file = os.path.join(path, "job_"+name+".submit")
 
string submitPVValidationJobs.job_submit_template
 
tuple submitPVValidationJobs.jobm = os.path.abspath(path)
 
list submitPVValidationJobs.jobName = []
 Initialize all the variables. More...
 
tuple submitPVValidationJobs.jsonDATA = json.load(myJSON)
 
string submitPVValidationJobs.lastJobCommand = "bsub -o harvester"
 
tuple submitPVValidationJobs.lastJobOutput = getCommandOutput(lastJobCommand)
 
tuple submitPVValidationJobs.lfn_with_quotes = map(lambda x: "\'"+x+"\'",lfn)
 
tuple submitPVValidationJobs.lib_path = os.path.abspath(AnalysisStep_dir)
 
tuple submitPVValidationJobs.listOfRuns = out.decode()
 
tuple submitPVValidationJobs.listOfValidations = config.getResultingSection("validations")
 
tuple submitPVValidationJobs.local_proxy = subprocess.check_output(["voms-proxy-info", "--path"])
 
tuple submitPVValidationJobs.log_dir = os.path.join(self.the_dir,"log")
 
list submitPVValidationJobs.lumilist = []
 
tuple submitPVValidationJobs.lumiToCache = line.split(",")
 
list submitPVValidationJobs.maxevents = []
 
 submitPVValidationJobs.mergedFile = None
 
dictionary submitPVValidationJobs.myCachedLumi = {}
 
dictionary submitPVValidationJobs.mylist = {}
 
tuple submitPVValidationJobs.myLumiDB = getLuminosity(HOME,myRuns[0],myRuns[-1],doRunBased,opts.verbose)
 check that the list of runs is not empty More...
 
list submitPVValidationJobs.myRuns = []
 
list submitPVValidationJobs.mytuple = []
 
tuple submitPVValidationJobs.nEvents = subprocess.check_output(["das_client", "--limit", "0", "--query", "summary run={} dataset={} | grep summary.nevents".format(run, dataset)])
 
tuple submitPVValidationJobs.njobs = str(nruns)
 
tuple submitPVValidationJobs.now = datetime.datetime.now()
 
tuple submitPVValidationJobs.od = collections.OrderedDict(sorted(file_info.items()))
 
string submitPVValidationJobs.out = name+"_$(ProcId)"
 
tuple submitPVValidationJobs.output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv","-c","web"])
 
tuple submitPVValidationJobs.output_file_list1 = list()
 
tuple submitPVValidationJobs.output_file_list2 = list()
 
tuple submitPVValidationJobs.outputList = out.decode()
 
tuple submitPVValidationJobs.p = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
list submitPVValidationJobs.params = self.extraCondVect[element]
 
tuple submitPVValidationJobs.parser = OptionParser(description=desc,version='%prog version 0.1')
 
tuple submitPVValidationJobs.pool = multiprocessing.Pool(processes=20)
 
 submitPVValidationJobs.proxy = proxy_path))
 
list submitPVValidationJobs.ptcut = []
 
tuple submitPVValidationJobs.q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE)
 
list submitPVValidationJobs.refittertype = []
 
tuple submitPVValidationJobs.run = runFromFilename(f)
 
list submitPVValidationJobs.runboundary = []
 
dictionary submitPVValidationJobs.runInfo = {}
 
tuple submitPVValidationJobs.runToCache = line.split(",")
 
tuple submitPVValidationJobs.s = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
tuple submitPVValidationJobs.scripts_dir = os.path.join(AnalysisStep_dir,"scripts")
 print "==========>",conditions More...
 
tuple submitPVValidationJobs.splitList = split(mylist,10)
 
list submitPVValidationJobs.srcFiles = []
 N.B. More...
 
 submitPVValidationJobs.stdout = dump)
 
string submitPVValidationJobs.submissionCommand = "condor_submit "
 
tuple submitPVValidationJobs.submissionOutput = getCommandOutput(submissionCommand)
 
string submitPVValidationJobs.submitcommand1 = "chmod u+x "
 
string submitPVValidationJobs.t = ""
 
tuple submitPVValidationJobs.template_cfg_file = os.path.join(self.the_dir,"PVValidation_T_cfg.py")
 
 submitPVValidationJobs.theBaseName = None
 
 submitPVValidationJobs.theBashDir = None
 
tuple submitPVValidationJobs.theDataSet = dataset.split("/")
 
 submitPVValidationJobs.thejobIndex = None
 
string submitPVValidationJobs.theLumi = '1'
 
int submitPVValidationJobs.totalJobs = 0
 
list submitPVValidationJobs.tracktype = []
 
list submitPVValidationJobs.ttrhtype = []
 
tuple submitPVValidationJobs.USER = os.environ.get('USER')
 
list submitPVValidationJobs.vertextype = []