|
string | submitPVResolutionJobs.__author__ = 'Marco Musich' |
|
string | submitPVResolutionJobs.__copyright__ = 'Copyright 2020, CERN CMS' |
|
list | submitPVResolutionJobs.__credits__ = ['Ernesto Migliore', 'Salvatore Di Guida'] |
|
string | submitPVResolutionJobs.__email__ = 'marco.musich@cern.ch' |
|
string | submitPVResolutionJobs.__license__ = 'Unknown' |
|
string | submitPVResolutionJobs.__maintainer__ = 'Marco Musich' |
|
int | submitPVResolutionJobs.__version__ = 1 |
|
| submitPVResolutionJobs.action |
|
| submitPVResolutionJobs.args |
|
| submitPVResolutionJobs.bashdir = os.path.join(cwd,"BASH") |
|
string | submitPVResolutionJobs.cmd2 = ' dasgoclient -limit=0 -query \'file run='+blob[0]+' dataset='+blob[1]+'\'' |
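
The command held in cmd2 is what the per-run file lookup executes. A minimal sketch of the getFilesForRun helper that pool.map applies below, assuming the tuple layout (run, dataset) and that the helper returns the list of file names:

    from subprocess import Popen, PIPE

    def getFilesForRun(blob):
        """Query DAS for the files of one run of a dataset; blob is assumed to be (run, dataset)."""
        cmd2 = ' dasgoclient -limit=0 -query \'file run=' + blob[0] + ' dataset=' + blob[1] + '\''
        q = Popen(cmd2, shell=True, stdout=PIPE, stderr=PIPE)
        out, err = q.communicate()
        # dasgoclient prints one file per line; drop empty trailing entries
        return [f for f in out.decode().split('\n') if f.strip()]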
|
| submitPVResolutionJobs.config = ConfigParser.ConfigParser() |
| parse the configuration file
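
A minimal sketch of the configuration-parsing step, assuming an INI-style file and that the as_dict helper flattens the parser into a {section: {option: value}} dictionary (the file name below is hypothetical):

    import ConfigParser  # 'configparser' in Python 3

    def as_dict(config):
        """Turn a ConfigParser object into a nested {section: {option: value}} dictionary."""
        dictionary = {}
        for section in config.sections():
            dictionary[section] = {}
            for option in config.options(section):
                dictionary[section][option] = config.get(section, option)
        return dictionary

    config = ConfigParser.ConfigParser()
    config.read("PVResolutionConfig.ini")   # hypothetical configuration file
    inputDict = as_dict(config)
    lumimask = inputDict["Input"]["lumimask"]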
|
|
string | submitPVResolutionJobs.CopyRights = '##################################\n' |
|
| submitPVResolutionJobs.count = pool.map(getFilesForRun,mytuple) |
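
The pool-related entries (mytuple, pool, count, file_info) indicate that the DAS queries are fanned out over a worker pool. A sketch of that pattern, with the tuple contents assumed:

    import multiprocessing

    mytuple = [(run, opts.DATASET) for run in runs]   # one (run, dataset) task per run
    pool = multiprocessing.Pool(processes=20)         # up to 20 parallel dasgoclient queries
    count = pool.map(getFilesForRun, mytuple)         # list of per-run file lists, in run order
    file_info = dict(zip(runs, count))                # run -> list of files

    for run in runs:
        files = file_info[run]                        # files of the run being processed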
|
| submitPVResolutionJobs.cwd = os.getcwd() |
|
| submitPVResolutionJobs.default |
|
string | submitPVResolutionJobs.desc = """This is a description of %prog.""" |
|
| submitPVResolutionJobs.dest |
|
dictionary | submitPVResolutionJobs.dictionary = {} |
|
| submitPVResolutionJobs.dump |
|
| submitPVResolutionJobs.eosdir = os.path.join("/store/group/alca_trackeralign",USER,"test_out",opts.taskname) |
|
| submitPVResolutionJobs.err |
|
| submitPVResolutionJobs.file_info = dict(zip(runs, count)) |
|
| submitPVResolutionJobs.files = file_info[run] |
|
| submitPVResolutionJobs.help |
|
| submitPVResolutionJobs.HOME = os.environ.get('HOME') |
|
| submitPVResolutionJobs.input_CMSSW_BASE = os.environ.get('CMSSW_BASE') |
|
dictionary | submitPVResolutionJobs.inputDict = as_dict(config) |
|
| submitPVResolutionJobs.job_submit_file = os.path.join(path, "job_"+name+".submit") |
|
string | submitPVResolutionJobs.job_submit_template |
|
| submitPVResolutionJobs.jsonDATA = json.load(myJSON) |
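
jsonDATA is the parsed lumimask; a sketch of how it might be used to keep only certified runs (the membership test on run numbers is an assumption):

    import json

    with open(lumimask) as myJSON:
        jsonDATA = json.load(myJSON)   # {"run": [[first_lumi, last_lumi], ...], ...}

    # keep only the runs that appear in the certification JSON (assumed filtering)
    runs = [run for run in runs if run in jsonDATA]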
|
| submitPVResolutionJobs.key = key.split(":", 1)[1] |
| prepare the HTCondor submission files and eventually submit them
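
A minimal sketch of the "prepare the HTCondor submission files and eventually submit them" step; only the job_<name>.submit naming and the condor_submit command come from this listing, the template fields and the helper name are illustrative:

    import os
    import textwrap

    job_submit_template = textwrap.dedent("""\
        executable = {script}
        output     = {base}.out
        error      = {base}.err
        log        = {base}.log
        queue
        """)

    def prepareAndSubmit(path, name, script, dryRun=True):
        """Write job_<name>.submit for one bash script and optionally submit it."""
        job_submit_file = os.path.join(path, "job_" + name + ".submit")
        with open(job_submit_file, "w") as sub:
            sub.write(job_submit_template.format(script=script,
                                                 base=os.path.join(path, name)))
        submissionCommand = "condor_submit " + job_submit_file
        if not dryRun:
            os.system(submissionCommand)
        return job_submit_file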
|
|
string | submitPVResolutionJobs.listOfFiles = '[' |
|
| submitPVResolutionJobs.local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip() |
|
| submitPVResolutionJobs.lumimask = inputDict["Input"]["lumimask"] |
|
| submitPVResolutionJobs.lumiToCache = line.split(",")[-1].replace("\r", "") |
|
dictionary | submitPVResolutionJobs.myCachedLumi = {} |
|
| submitPVResolutionJobs.myLumiDB = getLuminosity(HOME,runs[0],runs[-1],True,opts.verbose) |
| check that the list of runs is not empty
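
The "check that the list of runs is not empty" step might look like the sketch below; the exact error handling is an assumption:

    # drop the empty strings left by the trailing newline of the dasgoclient output
    runs = [r for r in runs if r.strip()]
    if not runs:
        raise Exception("The list of runs for dataset " + opts.DATASET + " is empty, nothing to submit")
    runs.sort()
    myLumiDB = getLuminosity(HOME, runs[0], runs[-1], True, opts.verbose)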
|
|
list | submitPVResolutionJobs.mytuple = [] |
|
| submitPVResolutionJobs.opts |
|
| submitPVResolutionJobs.out |
|
| submitPVResolutionJobs.output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv"]) |
| using normtag: output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS", "--normtag","/cvmfs/cms-bril.cern.ch/cms-lumi-pog/Normtags/normtag_PHYSICS.json", "-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv"])
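
A sketch of the luminosity lookup behind getLuminosity: brilcalc is called once for the whole run range and its CSV output is cached per run in myCachedLumi. The CSV column layout ("run:fill" first, recorded luminosity last) is inferred from runToCache and lumiToCache in this listing and should be treated as an assumption:

    import subprocess

    def getLuminosity(homedir, minRun, maxRun, isRunBased, verbose):
        """Return {run: recorded luminosity in /pb} for the given run range (sketch)."""
        myCachedLumi = {}
        output = subprocess.check_output(
            [homedir + "/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS",
             "-u", "/pb", "--begin", str(minRun), "--end", str(maxRun),
             "--output-style", "csv"])
        for line in output.decode().split("\n"):
            if line.startswith("#") or "," not in line:
                continue                                    # skip header, summary and empty lines
            runToCache = line.split(",")[0].split(":")[0]   # "run:fill" -> run
            lumiToCache = line.split(",")[-1].replace("\r", "")
            myCachedLumi[runToCache] = lumiToCache
        return myCachedLumi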
|
|
| submitPVResolutionJobs.outputList = out.decode().split('\n') |
|
| submitPVResolutionJobs.p = subprocess.Popen(*args, **kwargs) |
|
| submitPVResolutionJobs.parser = OptionParser(description=desc,version='%prog version 0.1') |
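
The opts.* attributes appearing in this listing (DATASET, taskname, verbose) imply options along the following lines; the flag spellings and defaults are assumptions:

    from optparse import OptionParser

    desc = """This is a description of %prog."""
    parser = OptionParser(description=desc, version='%prog version 0.1')
    parser.add_option('-d', '--dataset',  dest='DATASET',  help='dataset to process')
    parser.add_option('-t', '--taskname', dest='taskname', help='task name used for the output area')
    parser.add_option('-v', '--verbose',  dest='verbose',  action='store_true', default=False,
                      help='verbose printout')
    (opts, args) = parser.parse_args()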
|
| submitPVResolutionJobs.pool = multiprocessing.Pool(processes=20) |
|
| submitPVResolutionJobs.q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE) |
|
list | submitPVResolutionJobs.runs = get_status_output("dasgoclient -query='run dataset="+opts.DATASET+"'",shell=True, stdout=PIPE, stderr=PIPE)[1].decode().split("\n") |
| check first there is a valid grid proxy
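
The "check first there is a valid grid proxy" step could be implemented as in the sketch below; the voms-proxy-info --path call is the one shown for local_proxy, while the time-left check and the helper name are assumptions:

    import subprocess

    def proxy_is_valid():
        """Return True if a VOMS proxy exists and still has time left (sketch)."""
        try:
            timeleft = subprocess.check_output(["voms-proxy-info", "--timeleft"]).strip()
            return int(timeleft) > 0
        except (subprocess.CalledProcessError, OSError, ValueError):
            return False

    if not proxy_is_valid():
        raise SystemExit("Please create a valid grid proxy first, e.g. voms-proxy-init -voms cms")
    local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip()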
|
|
| submitPVResolutionJobs.runToCache = line.split(",")[0].split(":")[0] |
|
string | submitPVResolutionJobs.script |
|
| submitPVResolutionJobs.scriptFile = open(scriptFileName,'w') |
|
| submitPVResolutionJobs.scriptFileName = os.path.join(bashdir,"batchHarvester_"+key+"_"+str(count-1)+".sh") |
|
| submitPVResolutionJobs.stderr |
|
| submitPVResolutionJobs.stdout |
|
string | submitPVResolutionJobs.submissionCommand = "condor_submit "+job_submit_file |
|
string | submitPVResolutionJobs.theLumi = '1' |
|
| submitPVResolutionJobs.USER = os.environ.get('USER') |
| prepare the eos output directory
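
A sketch of the "prepare the eos output directory" step; creating it through the eos client is an assumption about how the script talks to EOS:

    import os

    USER = os.environ.get('USER')
    eosdir = os.path.join("/store/group/alca_trackeralign", USER, "test_out", opts.taskname)
    # create the output area on EOS (assumed command; requires the eos client on the node)
    os.system("/usr/bin/eos mkdir -p " + eosdir)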
|
|