submitPVResolutionJobs.py File Reference

Go to the source code of this file.

Namespaces

 submitPVResolutionJobs
 

Functions

def submitPVResolutionJobs.as_dict (config)
 
def submitPVResolutionJobs.batchScriptCERN (theCMSSW_BASE, cfgdir, runindex, eosdir, lumiToRun, key, config, tkCollection, isUnitTest=False)
 
def submitPVResolutionJobs.check_proxy ()
 
def submitPVResolutionJobs.forward_proxy (rundir)
 
def submitPVResolutionJobs.get_status_output (*args, **kwargs)
 
def submitPVResolutionJobs.getFilesForRun (blob)
 
def submitPVResolutionJobs.getLuminosity (homedir, minRun, maxRun, isRunBased, verbose)
 
def submitPVResolutionJobs.isInJSON (run, jsonfile)
 
def submitPVResolutionJobs.main ()
 
def submitPVResolutionJobs.mkdir_eos (out_path)
 method to recursively create directories on EOS (see the sketch after this list)
 
def submitPVResolutionJobs.write_HTCondor_submit_file (path, name, nruns, proxy_path=None)
 
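The mkdir_eos() entry above notes that the EOS output area is created recursively. The sketch below illustrates one way to do that, assuming a standard "eos mkdir" command is available on the node; the helper name mkdir_eos_sketch and the eos_cmd default are assumptions, not the actual implementation.

    import os
    import subprocess

    def mkdir_eos_sketch(out_path, eos_cmd="eos"):
        """Create out_path on EOS one directory level at a time (illustrative only)."""
        newpath = "/"
        for level in out_path.split("/"):
            if not level:
                continue
            newpath = os.path.join(newpath, level)
            # creating a directory that already exists is harmless here
            subprocess.call([eos_cmd, "mkdir", newpath])
        return out_path

A caller would typically pass something like the eosdir value listed under Variables below.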

Variables

string submitPVResolutionJobs.__author__ = 'Marco Musich'
 
string submitPVResolutionJobs.__copyright__ = 'Copyright 2020, CERN CMS'
 
list submitPVResolutionJobs.__credits__ = ['Ernesto Migliore', 'Salvatore Di Guida']
 
string submitPVResolutionJobs.__email__ = 'marco.musich@cern.ch'
 
string submitPVResolutionJobs.__license__ = 'Unknown'
 
string submitPVResolutionJobs.__maintainer__ = 'Marco Musich'
 
int submitPVResolutionJobs.__version__ = 1
 
 submitPVResolutionJobs.action
 
 submitPVResolutionJobs.args
 
 submitPVResolutionJobs.bashdir = os.path.join(cwd,"BASH")
 
 submitPVResolutionJobs.cfgdir = os.path.join(cwd,"cfg")
 
string submitPVResolutionJobs.cmd2 = ' dasgoclient -limit=0 -query \'file run='+blob[0]+' dataset='+blob[1]+'\''
 
 submitPVResolutionJobs.config = ConfigParser.ConfigParser()
 parse the configuration file
 
string submitPVResolutionJobs.CopyRights = '##################################\n'
 
 submitPVResolutionJobs.count = pool.map(getFilesForRun,mytuple)
 
 submitPVResolutionJobs.cwd = os.getcwd()
 
 submitPVResolutionJobs.default
 
string submitPVResolutionJobs.desc = """This is a description of %prog."""
 
 submitPVResolutionJobs.dest
 
dictionary submitPVResolutionJobs.dictionary = {}
 
 submitPVResolutionJobs.dump
 
 submitPVResolutionJobs.eosdir = os.path.join("/store/group/alca_trackeralign",USER,"test_out",opts.taskname)
 
 submitPVResolutionJobs.err
 
 submitPVResolutionJobs.file_info = dict(zip([run for run, _ in mytuple], count))
 
 submitPVResolutionJobs.files = file_info[run]
 
 submitPVResolutionJobs.help
 
 submitPVResolutionJobs.HOME = os.environ.get('HOME')
 
 submitPVResolutionJobs.input_CMSSW_BASE = os.environ.get('CMSSW_BASE')
 
def submitPVResolutionJobs.inputDict = as_dict(config)
 
 submitPVResolutionJobs.job_submit_file = os.path.join(path, "job_"+name+".submit")
 
string submitPVResolutionJobs.job_submit_template
 
 submitPVResolutionJobs.jsonDATA = json.load(myJSON)
 
 submitPVResolutionJobs.KEY
 
 submitPVResolutionJobs.key = key.split(":", 1)[1]
 prepare the HTCondor submission files and optionally submit them (see the sketch at the end of this list)
 
string submitPVResolutionJobs.listOfFiles = '['
 
 submitPVResolutionJobs.local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
 
def submitPVResolutionJobs.lumimask = inputDict["Input"]["lumimask"]
 
 submitPVResolutionJobs.lumiToCache = line.split(",")[-1].replace("\r", "")
 
dictionary submitPVResolutionJobs.myCachedLumi = {}
 
def submitPVResolutionJobs.myLumiDB = getLuminosity(HOME,runs[0],runs[-1],True,opts.verbose)
 check that the list of runs is not empty
 
list submitPVResolutionJobs.mytuple = []
 
 submitPVResolutionJobs.opts
 
 submitPVResolutionJobs.out
 
 submitPVResolutionJobs.output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv"])
 alternative invocation using a normtag: output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS", "--normtag", "/cvmfs/cms-bril.cern.ch/cms-lumi-pog/Normtags/normtag_PHYSICS.json", "-u", "/pb", "--begin", str(minRun), "--end", str(maxRun), "--output-style", "csv"]) (see the luminosity sketch at the end of this list)
 
 submitPVResolutionJobs.outputList = out.decode().split('\n')
 
 submitPVResolutionJobs.p = subprocess.Popen(*args, **kwargs)
 
 submitPVResolutionJobs.parser = OptionParser(description=desc,version='%prog version 0.1')
 
 submitPVResolutionJobs.pool = multiprocessing.Pool(processes=20)
 
 submitPVResolutionJobs.q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE)
 
 submitPVResolutionJobs.runindex
 
def submitPVResolutionJobs.runs = get_status_output("dasgoclient -query='run dataset="+opts.DATASET+"'",shell=True, stdout=PIPE, stderr=PIPE)[1].decode().split("\n")
 first check that there is a valid grid proxy (see the proxy and DAS query sketches at the end of this list)
 
 submitPVResolutionJobs.runToCache = line.split(",")[0].split(":")[0]
 
string submitPVResolutionJobs.script
 
 submitPVResolutionJobs.scriptFile = open(scriptFileName,'w')
 
 submitPVResolutionJobs.scriptFileName = os.path.join(bashdir,"batchHarvester_"+key+"_"+str(count-1)+".sh")
 
 submitPVResolutionJobs.stderr
 
 submitPVResolutionJobs.stdout
 
string submitPVResolutionJobs.submissionCommand = "condor_submit "+job_submit_file
 
string submitPVResolutionJobs.theLumi = '1'
 
def submitPVResolutionJobs.tkCollection = inputDict["Input"]["trackcollection"]
 
 submitPVResolutionJobs.USER = os.environ.get('USER')
 prepare the EOS output directory
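
The variable entries above map onto a few recurring patterns in the script; the sketches below reconstruct them from the names and values listed on this page and are illustrative only.

The check_proxy() and forward_proxy() functions, together with the local_proxy entry, suggest a standard VOMS proxy check followed by copying the proxy into the job directory. A minimal sketch, assuming the usual voms-proxy-info options; the target file name .user_proxy is an assumption:

    import os
    import shutil
    import subprocess

    def check_proxy_sketch():
        """Return True if a VOMS proxy valid for at least one hour exists (illustrative only)."""
        try:
            subprocess.check_call(["voms-proxy-info", "--exists", "--valid", "1:00"])
            return True
        except subprocess.CalledProcessError:
            return False

    def forward_proxy_sketch(rundir):
        """Copy the local proxy into rundir so that batch jobs can use it (illustrative only)."""
        local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).decode().strip()
        shutil.copyfile(local_proxy, os.path.join(rundir, ".user_proxy"))  # file name is an assumption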
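
The mytuple, pool, count and file_info entries indicate that the file list for each run is fetched from DAS in parallel with a multiprocessing pool. A sketch of that pattern, assuming dasgoclient is in the PATH; the run numbers and dataset below are placeholders:

    import multiprocessing
    import subprocess
    from subprocess import PIPE, Popen

    def getFilesForRun_sketch(blob):
        """Query DAS for the files belonging to one (run, dataset) pair (illustrative only)."""
        run, dataset = blob
        cmd2 = "dasgoclient -limit=0 -query 'file run=" + run + " dataset=" + dataset + "'"
        q = Popen(cmd2, shell=True, stdout=PIPE, stderr=PIPE)
        out, err = q.communicate()
        return out.decode().split("\n")

    if __name__ == "__main__":
        runs = ["316758", "316766"]                               # placeholder run numbers
        dataset = "/SomeDataset/SomeEra-TkAlMinBias-v1/ALCARECO"  # placeholder dataset
        mytuple = [(run, dataset) for run in runs]
        pool = multiprocessing.Pool(processes=20)
        count = pool.map(getFilesForRun_sketch, mytuple)
        file_info = dict(zip([run for run, _ in mytuple], count))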
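
getLuminosity() evidently runs brilcalc with --output-style csv and caches a per-run luminosity (see the output, outputList, runToCache, lumiToCache and myCachedLumi entries). A sketch of the parsing step, assuming the last CSV column holds the luminosity value; skipping lines that start with "#" is an assumption about the brilcalc CSV format:

    import subprocess

    def getLuminosity_sketch(homedir, minRun, maxRun):
        """Return a {run: luminosity} cache built from brilcalc CSV output (illustrative only)."""
        output = subprocess.check_output(
            [homedir + "/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS",
             "-u", "/pb", "--begin", str(minRun), "--end", str(maxRun),
             "--output-style", "csv"])
        outputList = output.decode().split("\n")
        myCachedLumi = {}
        for line in outputList:
            if not line or line.startswith("#"):
                continue                                   # skip header and summary lines
            runToCache = line.split(",")[0].split(":")[0]  # "run:fill" -> run
            lumiToCache = line.split(",")[-1].replace("\r", "")
            myCachedLumi[runToCache] = lumiToCache
        return myCachedLumi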
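
Finally, write_HTCondor_submit_file() together with the job_submit_file and submissionCommand entries shows that one .submit file is written per task and handed to condor_submit. The job_submit_template itself is not reproduced on this page, so the submit-file fields and the $(ProcId)-based naming below are assumptions; only the file name pattern and the condor_submit call come from the entries above:

    import os
    import subprocess

    def write_HTCondor_submit_file_sketch(path, name, nruns, proxy_path=None):
        """Write a minimal HTCondor .submit file for nruns jobs (illustrative only)."""
        job_submit_file = os.path.join(path, "job_" + name + ".submit")
        with open(job_submit_file, "w") as fout:
            fout.write("universe      = vanilla\n")
            fout.write("executable    = " + os.path.join(path, name + "_$(ProcId).sh") + "\n")
            fout.write("output        = " + os.path.join(path, name + "_$(ProcId).out") + "\n")
            fout.write("error         = " + os.path.join(path, name + "_$(ProcId).err") + "\n")
            fout.write("log           = " + os.path.join(path, name + "_$(ProcId).log") + "\n")
            if proxy_path is not None:
                fout.write("x509userproxy = " + proxy_path + "\n")
            fout.write("queue " + str(nruns) + "\n")
        return job_submit_file

    # submission step, mirroring the submissionCommand entry above
    job_submit_file = write_HTCondor_submit_file_sketch("/tmp", "taskname", 1)  # placeholder arguments
    submissionCommand = "condor_submit " + job_submit_file
    subprocess.call(submissionCommand, shell=True)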