
submitPVValidationJobs.py File Reference


Classes

class  submitPVValidationJobs.BetterConfigParser
 
class  submitPVValidationJobs.Job
 
class  submitPVValidationJobs.RefitType
 

Namespaces

 submitPVValidationJobs
 

Functions

def submitPVValidationJobs.__del__ (self)
 
def submitPVValidationJobs.__init__ (self, dataset, job_number, job_id, job_name, isDA, isMC, applyBOWS, applyEXTRACOND, extraconditions, runboundary, lumilist, intlumi, maxevents, gt, allFromGT, alignmentDB, alignmentTAG, apeDB, apeTAG, bowDB, bowTAG, vertextype, tracktype, refittertype, ttrhtype, applyruncontrol, ptcut, CMSSW_dir, the_dir)
 
def submitPVValidationJobs.as_dict (config)
 
def submitPVValidationJobs.check_proxy ()
 
def submitPVValidationJobs.ConfigSectionMap (config, section)
 
def submitPVValidationJobs.createTheBashFile (self)
 
def submitPVValidationJobs.createTheCfgFile (self, lfn)
 
def submitPVValidationJobs.createTheLSFFile (self)
 
def submitPVValidationJobs.forward_proxy (rundir)
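
 Judging from its name and from the local_proxy and dump variables listed under Variables below, forward_proxy(rundir) makes the VOMS grid proxy available to the batch job, with check_proxy() verifying that a proxy exists in the first place. A minimal sketch of that pattern; the .user_proxy destination name and the exit message are assumptions, not taken from this file:

    import os, shutil, subprocess, sys

    def check_proxy_sketch():
        """Return True if a valid grid proxy exists (illustrative sketch)."""
        try:
            with open(os.devnull, "w") as dump:
                subprocess.check_call(["voms-proxy-info", "--exists"],
                                      stdout=dump, stderr=dump)
        except subprocess.CalledProcessError:
            return False
        return True

    def forward_proxy_sketch(rundir):
        """Copy the local proxy into rundir so the batch node can authenticate."""
        if not check_proxy_sketch():
            sys.exit("Please initialize a grid proxy first (voms-proxy-init).")
        local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip().decode()
        shutil.copyfile(local_proxy, os.path.join(rundir, ".user_proxy"))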
 
def submitPVValidationJobs.getBatchjobId (self)
 
def submitPVValidationJobs.getCommandOutput (command)
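
 getCommandOutput(command) appears to be a thin wrapper around os.popen, as suggested by the child, data and err variables listed under Variables below; a sketch under that assumption:

    import os

    def getCommandOutput_sketch(command):
        """Run a shell command and return its output; raise if it exits with an error."""
        child = os.popen(command)
        data = child.read()
        err = child.close()
        if err:
            raise RuntimeError('"%s" failed with exit code %s' % (command, err))
        return data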
 
def submitPVValidationJobs.getFilesForRun (blob)
 
def submitPVValidationJobs.getLuminosity (homedir, minRun, maxRun, isRunBased, verbose)
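
 The output, runToCache, lumiToCache and myCachedLumi entries under Variables below indicate that getLuminosity() runs brilcalc with CSV output and caches the recorded luminosity per run. A sketch of the parsing step only; the exact column layout of the brilcalc CSV is an assumption here:

    def parseBrilcalcCSV_sketch(output):
        """Build a run -> recorded luminosity map from brilcalc CSV output (hypothetical helper)."""
        myCachedLumi = {}
        for line in output.decode().split("\n"):
            if not line or line.startswith("#"):                 # skip header/comment lines
                continue
            runToCache = line.split(",")[0].split(":")[0]        # 'run:fill' -> run number
            lumiToCache = line.split(",")[-1].replace("\r", "")  # last column = recorded lumi
            myCachedLumi[runToCache] = lumiToCache
        return myCachedLumi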
 
def submitPVValidationJobs.getNEvents (run, dataset)
 
def submitPVValidationJobs.getOutputBaseName (self)
 
def submitPVValidationJobs.getOutputBaseNameWithData (self)
 
def submitPVValidationJobs.getOutputFileName (self)
 
def submitPVValidationJobs.isInJSON (run, jsonfile)
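
 isInJSON(run, jsonfile), together with the jsonDATA variable below, points to a simple membership test against a JSON luminosity mask; a sketch, assuming runs appear as top-level keys of that JSON:

    import json

    def isInJSON_sketch(run, jsonfile):
        """Return True if the run is listed in the JSON file (illustrative sketch)."""
        try:
            with open(jsonfile, "r") as myJSON:
                jsonDATA = json.load(myJSON)
            return str(run) in jsonDATA
        except Exception:
            # if the JSON file cannot be read, do not veto the run (assumption)
            return True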
 
def submitPVValidationJobs.main ()
 
def submitPVValidationJobs.mkdir_eos (out_path)
 Method to recursively create directories on EOS.
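
 Plain os.makedirs does not work on the EOS namespace, so a helper like this typically creates the path one level at a time through the eos CLI; a minimal sketch, where the eos executable name and the silent handling of already-existing directories are assumptions:

    import os, subprocess

    def mkdir_eos_sketch(out_path):
        """Recursively create every component of out_path on EOS (illustrative sketch)."""
        newpath = "/"
        for level in out_path.split("/"):
            if not level:
                continue
            newpath = os.path.join(newpath, level)
            # 'eos mkdir' creates one level; an already-existing directory is harmless
            subprocess.call(["eos", "mkdir", newpath])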
 
def submitPVValidationJobs.setEOSout (self, theEOSdir)
 
def submitPVValidationJobs.split (sequence, size)
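
 From its use in splitList = split(mylist, 10) under Variables below, split(sequence, size) chunks its input into pieces of at most size elements; a generic sketch for a list-like sequence (a dictionary such as mylist would first have to be turned into a list of its items):

    def split_sketch(sequence, size):
        """Yield successive chunks of at most 'size' elements."""
        for i in range(0, len(sequence), size):
            yield sequence[i:i + size]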
 
def submitPVValidationJobs.submit (self)
 
def submitPVValidationJobs.to_bool (value)
 
def submitPVValidationJobs.updateDB (run, runInfo)
 
def submitPVValidationJobs.updateDB2 ()
 
def submitPVValidationJobs.write_HTCondor_submit_file (path, name, nruns, proxy_path=None)
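
 write_HTCondor_submit_file(path, name, nruns, proxy_path) pairs with the job_submit_file, job_submit_template and submissionCommand = "condor_submit ..." entries under Variables below. A hypothetical sketch of what such a helper could write; every submit-file setting shown here is an illustrative assumption, not the script's actual template:

    import os

    def write_HTCondor_submit_file_sketch(path, name, nruns, proxy_path=None):
        """Write a minimal HTCondor .submit file queuing one job per run (illustrative)."""
        lines = [
            "universe   = vanilla",
            "executable = %s_$(ProcId).sh" % name,
            "output     = %s_$(ProcId).out" % name,
            "error      = %s_$(ProcId).err" % name,
            "log        = %s_$(ProcId).log" % name,
        ]
        if proxy_path is not None:
            lines.append("x509userproxy = %s" % proxy_path)
        lines.append("queue %d" % nruns)
        job_submit_file = os.path.join(path, "job_" + name + ".submit")
        with open(job_submit_file, "w") as f:
            f.write("\n".join(lines) + "\n")
        return job_submit_file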
 

Variables

string submitPVValidationJobs.__author__ = 'Marco Musich'
 
string submitPVValidationJobs.__copyright__ = 'Copyright 2020, CERN CMS'
 
list submitPVValidationJobs.__credits__ = ['Ernesto Migliore', 'Salvatore Di Guida']
 
string submitPVValidationJobs.__email__ = 'marco.musich@cern.ch'
 
string submitPVValidationJobs.__license__ = 'Unknown'
 
string submitPVValidationJobs.__maintainer__ = 'Marco Musich'
 
int submitPVValidationJobs.__version__ = 1
 
 submitPVValidationJobs.action
 
 submitPVValidationJobs.aJob
 
 submitPVValidationJobs.alignmentDB = []
 
 submitPVValidationJobs.alignmentTAG = []
 
 submitPVValidationJobs.allFromGT = []
 
 submitPVValidationJobs.AnalysisStep_dir = os.path.join(input_CMSSW_BASE,"src/Alignment/OfflineValidation/test")
 
 submitPVValidationJobs.apeDB = []
 
 submitPVValidationJobs.apeTAG = []
 
 submitPVValidationJobs.applyBOWS = []
 
 submitPVValidationJobs.applyEXTRACOND = []
 
 submitPVValidationJobs.applyruncontrol = []
 
 submitPVValidationJobs.args
 
 submitPVValidationJobs.BASH_dir
 
 submitPVValidationJobs.batch_job_id
 
list submitPVValidationJobs.batchJobIds = []
 List of batch job IDs that the final harvesting step has to wait for.
 
 submitPVValidationJobs.bowDB = []
 
 submitPVValidationJobs.bowTAG = []
 
 submitPVValidationJobs.cfg_dir
 
 submitPVValidationJobs.child = os.popen(command)
 
 submitPVValidationJobs.child1 = os.system(submitcommand1)
 
string submitPVValidationJobs.cmd = 'dasgoclient -limit=0 -query \'run dataset='+opts.data + (' instance='+opts.instance+'\'' if (opts.instance is not None) else '\'')
 
string submitPVValidationJobs.cmd2 = ' dasgoclient -limit=0 -query \'file run='+blob[0][0]+' dataset='+blob[0][1]+ (' instance='+blob[1]+'\'' if (blob[1] is not None) else '\'')
 
 submitPVValidationJobs.CMSSW_dir
 
list submitPVValidationJobs.conditions = []
 
 submitPVValidationJobs.config = BetterConfigParser()
 
string submitPVValidationJobs.config_txt = '\n\n' + CopyRights + '\n\n'
 
 submitPVValidationJobs.ConfigFile = opts.inputconfig
 
string submitPVValidationJobs.CopyRights = '##################################\n'
 
 submitPVValidationJobs.count = pool.map(getFilesForRun,zip(mytuple,instances))
 
 submitPVValidationJobs.data = child.read()
 
string submitPVValidationJobs.dbName = "runInfo.pkl"
 
 submitPVValidationJobs.default
 
string submitPVValidationJobs.desc = """This is a description of %prog."""
 
 submitPVValidationJobs.dest
 
dictionary submitPVValidationJobs.dictionary = {}
 
list submitPVValidationJobs.doRunBased = []
 
 submitPVValidationJobs.dump
 
 submitPVValidationJobs.eosdir = os.path.join("/store/group/alca_trackeralign",USER,"test_out",t)
 
 submitPVValidationJobs.err = child.close()
 
 submitPVValidationJobs.extraCondVect = []
 
 submitPVValidationJobs.file_info = dict(zip(listOfRuns, count))
 
 submitPVValidationJobs.fin = open(template_cfg_file)
 
 submitPVValidationJobs.fout = open(os.path.join(self.cfg_dir,self.outputCfgName),'w')
 
 submitPVValidationJobs.gt = []
 
 submitPVValidationJobs.hadd_script_file = os.path.join(scripts_dir,jobName[iConf]+"_"+opts.taskname+".sh")
 
string submitPVValidationJobs.harvest_conditions = '"' + " && ".join(["ended(" + jobId + ")" for jobId in batchJobIds]) + '"'
 
 submitPVValidationJobs.help
 
 submitPVValidationJobs.HOME = os.environ.get('HOME')
 Check first that there is a valid grid proxy.
 
dictionary submitPVValidationJobs.infos = {}
 
 submitPVValidationJobs.input_CMSSW_BASE = os.environ.get('CMSSW_BASE')
 
def submitPVValidationJobs.inputDict = as_dict(config)
 
list submitPVValidationJobs.inputFiles = []
 
list submitPVValidationJobs.instances = [opts.instance for entry in mytuple]
 
 submitPVValidationJobs.intlumi
 
 submitPVValidationJobs.isDA = []
 
 submitPVValidationJobs.isMC = []
 
 submitPVValidationJobs.job_id
 
 submitPVValidationJobs.job_name = self.output_full_name
 
 submitPVValidationJobs.job_number
 
 submitPVValidationJobs.job_submit_file = os.path.join(path, "job_"+name+".submit")
 
string submitPVValidationJobs.job_submit_template
 
list submitPVValidationJobs.jobName = []
 Initialize all the variables.
 
 submitPVValidationJobs.jsonDATA = json.load(myJSON)
 
string submitPVValidationJobs.lastJobCommand = "bsub -o harvester"+opts.taskname+".tmp -q 1nh -w "+harvest_conditions+" "+hadd_script_file
 
def submitPVValidationJobs.lastJobOutput = getCommandOutput(lastJobCommand)
 
 submitPVValidationJobs.lfn_list
 
 submitPVValidationJobs.lfn_with_quotes = map(lambda x: "\'"+x+"\'",lfn)
 
 submitPVValidationJobs.lib_path = os.path.abspath(AnalysisStep_dir)
 
def submitPVValidationJobs.listOfRuns = out.decode().split("\n")
 
 submitPVValidationJobs.listOfValidations = config.getResultingSection("validations")
 
 submitPVValidationJobs.local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
 
 submitPVValidationJobs.log_dir = os.path.join(self.the_dir,"log")
 
 submitPVValidationJobs.LSF_dir
 
 submitPVValidationJobs.lumilist = []
 
 submitPVValidationJobs.lumiToCache = line.split(",")[-1].replace("\r", "")
 
 submitPVValidationJobs.maxevents = []
 
 submitPVValidationJobs.mergedFile = None
 
dictionary submitPVValidationJobs.myCachedLumi = {}
 
dictionary submitPVValidationJobs.mylist = {}
 
def submitPVValidationJobs.myLumiDB = getLuminosity(HOME,myRuns[0],myRuns[-1],doRunBased,opts.verbose)
 Check that the list of runs is not empty.
 
list submitPVValidationJobs.myRuns = []
 
list submitPVValidationJobs.mytuple = []
 
 submitPVValidationJobs.nEvents = subprocess.check_output(["das_client", "--limit", "0", "--query", "summary run={} dataset={} | grep summary.nevents".format(run, dataset)])
 
 submitPVValidationJobs.now = datetime.datetime.now()
 
 submitPVValidationJobs.od = collections.OrderedDict(sorted(file_info.items()))
 
 submitPVValidationJobs.opts
 
 submitPVValidationJobs.out
 
 submitPVValidationJobs.OUTDIR
 
 submitPVValidationJobs.output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv","-c","web"])
 
 submitPVValidationJobs.output_BASH_name
 
 submitPVValidationJobs.output_file_list1 = list()
 
 submitPVValidationJobs.output_file_list2 = list()
 
 submitPVValidationJobs.output_full_name
 
 submitPVValidationJobs.output_LSF_name
 
 submitPVValidationJobs.output_number_name
 
 submitPVValidationJobs.outputCfgName
 
 submitPVValidationJobs.outputList = out.decode().split('\n')
 
 submitPVValidationJobs.p = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
def submitPVValidationJobs.params = self.extraCondVect[element].split(',')
 
 submitPVValidationJobs.parser = OptionParser(description=desc,version='%prog version 0.1')
 
 submitPVValidationJobs.pool = multiprocessing.Pool(processes=20)
 
 submitPVValidationJobs.ptcut = []
 
 submitPVValidationJobs.q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE)
 
 submitPVValidationJobs.refittertype = []
 
 submitPVValidationJobs.run = runFromFilename(f)
 
 submitPVValidationJobs.runboundary = []
 
dictionary submitPVValidationJobs.runInfo = {}
 
 submitPVValidationJobs.runToCache = line.split(",")[0].split(":")[0]
 
 submitPVValidationJobs.s = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
 submitPVValidationJobs.scripts_dir = os.path.join(AnalysisStep_dir,"scripts")
 
def submitPVValidationJobs.splitList = split(mylist,10)
 
list submitPVValidationJobs.srcFiles = []
 
 submitPVValidationJobs.stderr
 
 submitPVValidationJobs.stdout
 
string submitPVValidationJobs.submissionCommand = "condor_submit "+job_submit_file
 
def submitPVValidationJobs.submissionOutput = getCommandOutput(submissionCommand)
 
string submitPVValidationJobs.submitcommand1 = "chmod u+x " + os.path.join(self.LSF_dir,self.output_LSF_name)
 
string submitPVValidationJobs.t = ""
 
 submitPVValidationJobs.template_cfg_file = os.path.join(self.the_dir,"PVValidation_T_cfg.py")
 
 submitPVValidationJobs.the_dir
 
 submitPVValidationJobs.theBaseName = None
 
 submitPVValidationJobs.theBashDir = None
 
def submitPVValidationJobs.theDataSet = dataset.split("/")[1]+"_"+(dataset.split("/")[2]).split("-")[0]
 
 submitPVValidationJobs.thejobIndex = None
 
string submitPVValidationJobs.theLumi = '1'
 
int submitPVValidationJobs.totalJobs = 0
 
 submitPVValidationJobs.tracktype = []
 
 submitPVValidationJobs.ttrhtype = []
 
 submitPVValidationJobs.USER = os.environ.get('USER')
 
 submitPVValidationJobs.vertextype = []