CMS 3D CMS Logo

Classes | Namespaces | Functions | Variables
submitPVValidationJobs.py File Reference

Go to the source code of this file.

Classes

class  submitPVValidationJobs.BetterConfigParser
 — Classes — More...
 
class  submitPVValidationJobs.Job
 auxiliary generator function to split lists into sub-lists, based on http://sandrotosi.blogspot.com/2011/04/python-group-list-in-sub-lists-of-n.html ; about generators, see also http://stackoverflow.com/questions/231767/the-python-yield-keyword-explained More...
 
class  submitPVValidationJobs.RefitType
 

Namespaces

 submitPVValidationJobs
 

Functions

def submitPVValidationJobs.as_dict (config)
 
def submitPVValidationJobs.check_proxy ()
 
def submitPVValidationJobs.ConfigSectionMap (config, section)
 
def submitPVValidationJobs.forward_proxy (rundir)
 
def submitPVValidationJobs.getCommandOutput (command)
 
def submitPVValidationJobs.getFilesForRun (blob)
 
def submitPVValidationJobs.getLuminosity (homedir, minRun, maxRun, isRunBased, verbose)
 
def submitPVValidationJobs.getNEvents (run, dataset)
 
def submitPVValidationJobs.isInJSON (run, jsonfile)
 
def submitPVValidationJobs.main ()
 
def submitPVValidationJobs.mkdir_eos (out_path)
 method to recursively create directories on EOS More...
 
def submitPVValidationJobs.split (sequence, size)
 
def submitPVValidationJobs.to_bool (value)
 
def submitPVValidationJobs.updateDB (run, runInfo)
 
def submitPVValidationJobs.updateDB2 ()
 
def submitPVValidationJobs.write_HTCondor_submit_file (path, name, nruns, proxy_path=None)
 

Variables

string submitPVValidationJobs.__author__ = 'Marco Musich'
 
string submitPVValidationJobs.__copyright__ = 'Copyright 2020, CERN CMS'
 
list submitPVValidationJobs.__credits__ = ['Ernesto Migliore', 'Salvatore Di Guida']
 
string submitPVValidationJobs.__email__ = 'marco.musich@cern.ch'
 
string submitPVValidationJobs.__license__ = 'Unknown'
 
string submitPVValidationJobs.__maintainer__ = 'Marco Musich'
 
int submitPVValidationJobs.__version__ = 1
 
 submitPVValidationJobs.action
 
 submitPVValidationJobs.aJob
 
list submitPVValidationJobs.alignmentDB = []
 
list submitPVValidationJobs.alignmentTAG = []
 
list submitPVValidationJobs.allFromGT = []
 
 submitPVValidationJobs.AnalysisStep_dir = os.path.join(input_CMSSW_BASE,"src/Alignment/OfflineValidation/test")
 
list submitPVValidationJobs.apeDB = []
 
list submitPVValidationJobs.apeTAG = []
 
list submitPVValidationJobs.applyBOWS = []
 
list submitPVValidationJobs.applyEXTRACOND = []
 
list submitPVValidationJobs.applyruncontrol = []
 
 submitPVValidationJobs.args
 
list submitPVValidationJobs.batchJobIds = []
 declare here the list of jobs that should be waited for More...
 
list submitPVValidationJobs.bowDB = []
 
list submitPVValidationJobs.bowTAG = []
 
 submitPVValidationJobs.child = os.popen(command)
 
string submitPVValidationJobs.cmd = 'dasgoclient -limit=0 -query \'run dataset='+opts.data + (' instance='+opts.instance+'\'' if (opts.instance is not None) else '\'')
 
string submitPVValidationJobs.cmd2 = ' dasgoclient -limit=0 -query \'file run='+blob[0][0]+' dataset='+blob[0][1]+ (' instance='+blob[1]+'\'' if (blob[1] is not None) else '\'')
 
list submitPVValidationJobs.conditions = []
 
 submitPVValidationJobs.config = BetterConfigParser()
 
 submitPVValidationJobs.ConfigFile = opts.inputconfig
 
string submitPVValidationJobs.CopyRights = '##################################\n'
 
 submitPVValidationJobs.count = pool.map(getFilesForRun,zip(mytuple,instances))
 
 submitPVValidationJobs.data = child.read()
 
string submitPVValidationJobs.dbName = "runInfo.pkl"
 
 submitPVValidationJobs.default
 
string submitPVValidationJobs.desc = """This is a description of %prog."""
 
 submitPVValidationJobs.dest
 
dictionary submitPVValidationJobs.dictionary = {}
 
list submitPVValidationJobs.doRunBased = []
 
 submitPVValidationJobs.dump
 
 submitPVValidationJobs.eosdir = os.path.join("/store/group/alca_trackeralign",USER,"test_out",t)
 
 submitPVValidationJobs.err = child.close()
 
list submitPVValidationJobs.extraCondVect = []
 
 submitPVValidationJobs.file_info = dict(zip(listOfRuns, count))
 
 submitPVValidationJobs.fout = open(hadd_script_file,'w')
 
list submitPVValidationJobs.gt = []
 
 submitPVValidationJobs.hadd_script_file = os.path.join(scripts_dir,jobName[iConf]+"_"+opts.taskname+".sh")
 
string submitPVValidationJobs.harvest_conditions = '"' + " && ".join(["ended(" + jobId + ")" for jobId in batchJobIds]) + '"'
 
 submitPVValidationJobs.help
 
 submitPVValidationJobs.HOME = os.environ.get('HOME')
 first check that there is a valid grid proxy More...
 
dictionary submitPVValidationJobs.infos = {}
 
 submitPVValidationJobs.input_CMSSW_BASE = os.environ.get('CMSSW_BASE')
 
def submitPVValidationJobs.inputDict = as_dict(config)
 
list submitPVValidationJobs.inputFiles = []
 
list submitPVValidationJobs.instances = [opts.instance for entry in mytuple]
 
list submitPVValidationJobs.isDA = []
 
list submitPVValidationJobs.isMC = []
 
 submitPVValidationJobs.job_submit_file = os.path.join(path, "job_"+name+".submit")
 
string submitPVValidationJobs.job_submit_template
 
list submitPVValidationJobs.jobName = []
 Initialize all the variables. More...
 
 submitPVValidationJobs.jsonDATA = json.load(myJSON)
 
string submitPVValidationJobs.lastJobCommand = "bsub -o harvester"+opts.taskname+".tmp -q 1nh -w "+harvest_conditions+" "+hadd_script_file
 
def submitPVValidationJobs.lastJobOutput = getCommandOutput(lastJobCommand)
 
 submitPVValidationJobs.lib_path = os.path.abspath(AnalysisStep_dir)
 
def submitPVValidationJobs.listOfRuns = out.decode().split("\n")
 
 submitPVValidationJobs.listOfValidations = config.getResultingSection("validations")
 
 submitPVValidationJobs.local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
 
list submitPVValidationJobs.lumilist = []
 
 submitPVValidationJobs.lumiToCache = line.split(",")[-1].replace("\r", "")
 
list submitPVValidationJobs.maxevents = []
 
 submitPVValidationJobs.mergedFile = None
 
dictionary submitPVValidationJobs.myCachedLumi = {}
 
dictionary submitPVValidationJobs.mylist = {}
 
def submitPVValidationJobs.myLumiDB = getLuminosity(HOME,myRuns[0],myRuns[-1],doRunBased,opts.verbose)
 check that the list of runs is not empty More...
 
list submitPVValidationJobs.myRuns = []
 
list submitPVValidationJobs.mytuple = []
 
 submitPVValidationJobs.nEvents = subprocess.check_output(["das_client", "--limit", "0", "--query", "summary run={} dataset={} | grep summary.nevents".format(run, dataset)])
 
 submitPVValidationJobs.now = datetime.datetime.now()
 
 submitPVValidationJobs.od = collections.OrderedDict(sorted(file_info.items()))
 
 submitPVValidationJobs.opts
 
 submitPVValidationJobs.out
 
 submitPVValidationJobs.output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv","-c","web"])
 
 submitPVValidationJobs.output_file_list1 = list()
 
 submitPVValidationJobs.output_file_list2 = list()
 
 submitPVValidationJobs.outputList = out.decode().split('\n')
 
 submitPVValidationJobs.p = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
 submitPVValidationJobs.parser = OptionParser(description=desc,version='%prog version 0.1')
 
 submitPVValidationJobs.pool = multiprocessing.Pool(processes=20)
 
list submitPVValidationJobs.ptcut = []
 
 submitPVValidationJobs.q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE)
 
list submitPVValidationJobs.refittertype = []
 
 submitPVValidationJobs.run = runFromFilename(f)
 
list submitPVValidationJobs.runboundary = []
 
dictionary submitPVValidationJobs.runInfo = {}
 
 submitPVValidationJobs.runToCache = line.split(",")[0].split(":")[0]
 
 submitPVValidationJobs.s = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
 
 submitPVValidationJobs.scripts_dir = os.path.join(AnalysisStep_dir,"scripts")
 print "==========>",conditions More...
 
def submitPVValidationJobs.splitList = split(mylist,10)
 
list submitPVValidationJobs.srcFiles = []
 N.B. More...
 
 submitPVValidationJobs.stderr
 
 submitPVValidationJobs.stdout
 
string submitPVValidationJobs.submissionCommand = "condor_submit "+job_submit_file
 
def submitPVValidationJobs.submissionOutput = getCommandOutput(submissionCommand)
 
string submitPVValidationJobs.t = ""
 
 submitPVValidationJobs.theBaseName = None
 
 submitPVValidationJobs.theBashDir = None
 
 submitPVValidationJobs.thejobIndex = None
 
string submitPVValidationJobs.theLumi = '1'
 
int submitPVValidationJobs.totalJobs = 0
 
list submitPVValidationJobs.tracktype = []
 
list submitPVValidationJobs.ttrhtype = []
 
 submitPVValidationJobs.USER = os.environ.get('USER')
 
list submitPVValidationJobs.vertextype = []