CMS 3D CMS Logo

submitPVValidationJobs.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 
3 '''Script that submits CMS Tracker Alignment Primary Vertex Validation workflows,
4 usage:
5 
6 submitPVValidationJobs.py -j TEST -D /HLTPhysics/Run2016C-TkAlMinBias-07Dec2018-v1/ALCARECO -i testPVValidation_Relvals_DATA.ini -r
7 '''
8 
9 from __future__ import print_function
10 from builtins import range
11 
12 __author__ = 'Marco Musich'
13 __copyright__ = 'Copyright 2020, CERN CMS'
14 __credits__ = ['Ernesto Migliore', 'Salvatore Di Guida']
15 __license__ = 'Unknown'
16 __maintainer__ = 'Marco Musich'
17 __email__ = 'marco.musich@cern.ch'
18 __version__ = 1
19 
20 import datetime,time
21 import os,sys
22 import copy
23 import pickle
24 import string, re
25 import configparser as ConfigParser
26 import json
27 import pprint
28 import subprocess
29 from optparse import OptionParser
30 from subprocess import Popen, PIPE
31 import collections
32 import warnings
33 import shutil
34 import multiprocessing
35 from enum import Enum
36 
class RefitType(Enum):
    """Flavours of track refit supported by the validation templates."""
    STANDARD = 1
    COMMON = 2
40 
# Banner printed at start-up and prepended to every generated cmsRun config.
CopyRights = "\n".join([
    '##################################',
    '# submitPVValidationJobs.py #',
    '# marco.musich@cern.ch #',
    '# April 2020 #',
    '##################################',
]) + "\n"
46 
47 
def check_proxy():
    """Check if a GRID proxy has been initialized.

    Returns True when `voms-proxy-info --exists` succeeds, False otherwise.
    Also returns False (instead of raising) when the voms-proxy-info
    executable is not available at all, e.g. outside a CMS environment.
    """
    try:
        with open(os.devnull, "w") as dump:
            subprocess.check_call(["voms-proxy-info", "--exists"],
                                  stdout=dump, stderr=dump)
    except (subprocess.CalledProcessError, OSError):
        # CalledProcessError: no valid proxy; OSError: binary not found.
        return False
    return True
59 
60 
def forward_proxy(rundir):
    """Forward the GRID proxy to a location visible from the batch system.

    Arguments:
    - `rundir`: directory where the forwarded proxy is stored (as `.user_proxy`)
    """
    if not check_proxy():
        print("Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
        sys.exit(1)

    proxy_file = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
    shutil.copyfile(proxy_file, os.path.join(rundir, ".user_proxy"))
74 
75 
def write_HTCondor_submit_file(path, name, nruns, proxy_path=None):
    """Write 'job_<name>.submit' into `path` and return its full path.

    Arguments:
    - `path`: job directory
    - `name`: base name of the per-process executable scripts
    - `nruns`: number of jobs to queue
    - `proxy_path`: path to proxy (only used in case of requested proxy forward)
    """
    template_lines = [
        "universe = vanilla\n",
        'requirements = (OpSysAndVer =?= "CentOS7")\n',
        "executable = {script:s}\n",
        "output = {jobm:s}/{out:s}.out\n",
        "error = {jobm:s}/{out:s}.err\n",
        "log = {jobm:s}/{out:s}.log\n",
        'transfer_output_files = ""\n',
        '+JobFlavour = "{flavour:s}"\n',
        "queue {njobs:s}\n",
    ]
    if proxy_path is not None:
        template_lines.append('+x509userproxy = "{proxy:s}"\n')

    job_submit_file = os.path.join(path, "job_" + name + ".submit")
    with open(job_submit_file, "w") as handle:
        handle.write("".join(template_lines).format(
            script=os.path.join(path, name + "_$(ProcId).sh"),
            out=name + "_$(ProcId)",
            jobm=os.path.abspath(path),
            flavour="tomorrow",
            njobs=str(nruns),
            proxy=proxy_path))

    return job_submit_file
111 
112 
def getCommandOutput(command):
    """Execute `command` in a shell and return its standard output.

    Arguments:
    - `command`: Shell command to be invoked by this function.
    """
    pipe = os.popen(command)
    output = pipe.read()
    status = pipe.close()  # non-None/non-zero on failure
    if status:
        print('%s failed w/ exit code %d' % (command, status))
    return output
125 
126 
def getFilesForRun(blob):
    """Query DAS for the files of one (run, dataset) pair.

    Arguments:
    - `blob`: ((run, dataset), instance) tuple; `instance` may be None.
    Returns the list of file LFNs reported by dasgoclient.
    """
    (run, dataset), instance = blob[0], blob[1]
    query = ' dasgoclient -limit=0 -query \'file run=' + run + ' dataset=' + dataset
    if instance is not None:
        query += ' instance=' + instance + '\''
    else:
        query += '\''

    proc = Popen(query, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, _ = proc.communicate()
    files = stdout.decode().split('\n')
    files.pop()  # drop the empty element after the trailing newline
    return files
137 
138 
def getNEvents(run, dataset):
    """Return the number of events in `dataset` for `run` as reported by DAS.

    Returns 0 when DAS reports no summary ("[]").
    """
    # decode(): check_output returns bytes under Python 3, so without it the
    # "[]" comparison below could never match and int() would fail on b"[]\n".
    nEvents = subprocess.check_output(["das_client", "--limit", "0", "--query", "summary run={} dataset={} | grep summary.nevents".format(run, dataset)]).decode()
    return 0 if nEvents == "[]\n" else int(nEvents)
143 
144 
def getLuminosity(homedir, minRun, maxRun, isRunBased, verbose):
    """Return {run: recorded luminosity} for [minRun, maxRun] from brilcalc.

    Expects brilcalc CSV output like
    +-------+------+--------+--------+-------------------+------------------+
    | nfill | nrun | nls | ncms | totdelivered(/fb) | totrecorded(/fb) |
    +-------+------+--------+--------+-------------------+------------------+
    and extracts, per run, the last CSV field (total recorded luminosity).
    Returns an empty dict when not run-based or when the BRIL query fails.
    """
    cachedLumi = {}
    if not isRunBased:
        return cachedLumi

    try:
        output = subprocess.check_output([homedir+"/.local/bin/brilcalc", "lumi", "-b", "STABLE BEAMS","-u", "/pb", "--begin", str(minRun),"--end",str(maxRun),"--output-style","csv","-c","web"])
    except:
        # Best effort: an unreachable BRIL DB must not kill the submission.
        warnings.warn('ATTENTION! Impossible to query the BRIL DB!')
        return cachedLumi

    if verbose:
        print("INSIDE GET LUMINOSITY")
        print(output)

    for csvLine in output.decode().split("\n"):
        if "#" in csvLine:
            continue  # skip header/comment lines
        fields = csvLine.split(",")
        runNumber = fields[0].split(":")[0]
        recordedLumi = fields[-1].replace("\r", "")
        cachedLumi[runNumber] = recordedLumi

    if verbose:
        print(cachedLumi)
    return cachedLumi
182 
183 
def isInJSON(run, jsonfile):
    """Return True if `run` is listed in the JSON lumi mask `jsonfile`.

    If the mask cannot be read, every run is accepted (returns True).
    """
    try:
        with open(jsonfile, 'r') as lumiMask:
            maskData = json.load(lumiMask)
            return (run in maskData)
    except:
        warnings.warn('ATTENTION! Impossible to find lumi mask! All runs will be used.')
        return True
193 
194 
def as_dict(config):
    """Convert a ConfigParser instance into a nested {section: {option: value}} dict."""
    return {
        section: {option: config.get(section, option)
                  for option in config.options(section)}
        for section in config.sections()
    }
204 
205 
def to_bool(value):
    """
    Converts 'something' to boolean. Raises exception for invalid formats
    Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
    Possible False values: 0, False, None, [], {}, "", "0", "faLse", "no", "n", "f", 0.0, ...
    """
    text = str(value).lower()
    if text in ("yes", "y", "true", "t", "1"):
        return True
    if text in ("no", "n", "false", "f", "0", "0.0", "", "none", "[]", "{}"):
        return False
    raise Exception('Invalid value for boolean conversion: ' + str(value))
216 
217 
def updateDB2():
    """Refresh the pickled run-info DB from the files in root-files/Run*.root.

    NOTE(review): relies on `glob`, `runFromFilename`, `getRunStartTime` and
    `isValid`, none of which are imported or defined in this file — confirm
    they are provided elsewhere before calling this.
    """
    dbName = "runInfo.pkl"
    infos = {}
    # Start from the existing DB, if any, so already-known runs are kept.
    if os.path.exists(dbName):
        with open(dbName, 'rb') as f:
            infos = pickle.load(f)

    for f in glob.glob("root-files/Run*.root"):
        run = runFromFilename(f)
        if run not in infos:
            infos[run] = {}
            infos[run]["start_time"] = getRunStartTime(run)
            # NOTE(review): this stores the flag under the top-level key
            # "isValid" (overwritten for every file) rather than under
            # infos[run] — looks like a bug; confirm intended key.
            infos["isValid"] = isValid(f)

    # Persist the (possibly updated) DB back to disk.
    with open(dbName, "wb") as f:
        pickle.dump(infos, f)
235 
236 
def updateDB(run, runInfo):
    """Insert `runInfo` for `run` into the pickled DB 'runInfo.pkl' (cwd).

    Existing entries are never overwritten: if `run` is already present the
    stored info is kept unchanged.
    """
    dbName = "runInfo.pkl"
    infos = {}
    if os.path.exists(dbName):
        with open(dbName, 'rb') as cache:
            infos = pickle.load(cache)

    # Only add the entry when the run is not yet known.
    infos.setdefault(run, runInfo)

    with open(dbName, "wb") as cache:
        pickle.dump(infos, cache)
250 
251 
class BetterConfigParser(ConfigParser.ConfigParser):
    """ConfigParser variant used by the alignment validation tools.

    Differences w.r.t. the stock parser:
    - option names keep their case (no lower-casing);
    - `exists` probes for an option without raising;
    - `getResultingSection` merges a section with its optional
      "local<Section>" override section.
    """

    def optionxform(self, optionstr):
        # Keep option names case-sensitive (the default lower-cases them).
        return optionstr

    def exists(self, section, option):
        """Return True if `option` exists in `section`, False otherwise
        (including when the section itself is missing)."""
        try:
            items = self.items(section)
        except ConfigParser.NoSectionError:
            return False
        for item in items:
            if item[0] == option:
                return True
        return False

    def __updateDict(self, dictionary, section):
        """Merge all options of `section` (and of "local<Section>" when present,
        which takes precedence) into `dictionary` and return it."""
        result = dictionary
        try:
            for option in self.options(section):
                result[option] = self.get(section, option)
            # A "local<Section>" section overrides the global one.
            if "local"+section.title() in self.sections():
                for option in self.options("local"+section.title()):
                    result[option] = self.get("local"+section.title(), option)
        except ConfigParser.NoSectionError as section:
            msg = ("%s in configuration files. This section is mandatory."
                   %(str(section).replace(":", "", 1)))
            # NOTE(review): `msg` is built but unused because the raise below
            # is commented out — a missing section is silently ignored.
            #raise AllInOneError(msg)
        return result

    def getResultingSection(self, section, defaultDict = {}, demandPars = []):
        """Return the merged option dict for `section`.

        - `defaultDict`: starting values (deep-copied, so the mutable default
          argument is never mutated);
        - `demandPars`: options that must be found either in `section` or in
          its "local<Section>[:suffix]" counterpart.
        """
        result = copy.deepcopy(defaultDict)
        for option in demandPars:
            try:
                result[option] = self.get(section, option)
            except ConfigParser.NoOptionError as globalSectionError:
                # Option missing in the global section: try the local one,
                # e.g. "Conditions:xyz" -> "localConditions:xyz".
                globalSection = str(globalSectionError).split("'")[-2]
                splittedSectionName = section.split(":")
                if len(splittedSectionName) > 1:
                    localSection = ("local"+section.split(":")[0].title()+":"
                                    +section.split(":")[1])
                else:
                    localSection = ("local"+section.split(":")[0].title())
                if self.has_section(localSection):
                    try:
                        result[option] = self.get(localSection, option)
                    except ConfigParser.NoOptionError as option:
                        msg = ("%s. This option is mandatory."
                               %(str(option).replace(":", "", 1).replace(
                                   "section",
                                   "section '"+globalSection+"' or", 1)))
                        # NOTE(review): error message built but never raised.
                        #raise AllInOneError(msg)
                else:
                    msg = ("%s. This option is mandatory."
                           %(str(globalSectionError).replace(":", "", 1)))
                    # NOTE(review): error message built but never raised.
                    #raise AllInOneError(msg)
        result = self.__updateDict(result, section)
        return result
314 
315 
def ConfigSectionMap(config, section):
    """Return {option: value} for all options of `section` in `config`.

    Options whose lookup fails (e.g. interpolation errors) are reported and
    stored as None instead of aborting the whole parse.
    """
    the_dict = {}
    for option in config.options(section):
        try:
            the_dict[option] = config.get(section, option)
            # The historical "== -1 -> DebugPrint" branch was dead code:
            # config.get returns a string and can never equal the int -1
            # (and DebugPrint is undefined in this file), so it was removed.
        except Exception:
            # Narrowed from a bare except: still best-effort, but no longer
            # swallows KeyboardInterrupt/SystemExit.
            print("exception on %s!" % option)
            the_dict[option] = None
    return the_dict
328 
329 
def mkdir_eos(out_path):
    """Create `out_path` on EOS via `eos.select mkdir`, one level at a time,
    then verify with `eos.select ls` (prints the output on failure)."""
    print("creating", out_path)
    newpath = '/'
    for segment in out_path.split('/'):
        newpath = os.path.join(newpath, segment)
        # Do not issue mkdir from the very top of the tree: only start once
        # the path is inside the 'test_out' area.
        if newpath.find('test_out') > 0:
            command = "/afs/cern.ch/project/eos/installation/cms/bin/eos.select mkdir "+newpath
            proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            proc.communicate()
            proc.wait()

    # Now check that the full directory exists.
    command2 = "/afs/cern.ch/project/eos/installation/cms/bin/eos.select ls "+out_path
    proc = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    proc.wait()
    if proc.returncode != 0:
        print(out)
351 
def split(sequence, size):
    """Yield successive chunks of at most `size` elements from `sequence`."""
    for start in range(0, len(sequence), size):
        yield sequence[start:start + size]
361 
362 class Job:
363 
364 
    def __init__(self, dataset, job_number, job_id, job_name, isDA, isMC, applyBOWS, applyEXTRACOND, extraconditions, runboundary, lumilist, intlumi, maxevents, gt, allFromGT, alignmentDB, alignmentTAG, apeDB, apeTAG, bowDB, bowTAG, vertextype, tracktype, refittertype, ttrhtype, applyruncontrol, ptcut, CMSSW_dir, the_dir):
        """Container describing one PV-validation batch job.

        All configuration values (conditions, tags, selection cuts, ...) are
        stored as-is; most are strings that later get substituted into the
        cmsRun config template by createTheCfgFile().
        """

        # "/PD/Era-Stream-vN/TIER" -> "PD_Era": primary dataset name plus the
        # first token of the processing string, used to label output files.
        theDataSet = dataset.split("/")[1]+"_"+(dataset.split("/")[2]).split("-")[0]

        self.data = theDataSet
        self.job_number = job_number
        self.job_id = job_id
        self.batch_job_id = None   # filled by submit() with the bsub reply
        self.job_name = job_name

        self.isDA = isDA
        self.isMC = isMC
        self.applyBOWS = applyBOWS
        self.applyEXTRACOND = applyEXTRACOND
        self.extraCondVect = extraconditions
        self.runboundary = runboundary
        self.lumilist = lumilist
        self.intlumi = intlumi
        self.maxevents = maxevents
        self.gt = gt
        self.allFromGT = allFromGT
        self.alignmentDB = alignmentDB
        self.alignmentTAG = alignmentTAG
        self.apeDB = apeDB
        self.apeTAG = apeTAG
        self.bowDB = bowDB
        self.bowTAG = bowTAG
        self.vertextype = vertextype
        self.tracktype = tracktype
        self.refittertype = refittertype
        self.ttrhtype = ttrhtype
        self.applyruncontrol = applyruncontrol
        self.ptcut = ptcut

        self.the_dir = the_dir
        self.CMSSW_dir = CMSSW_dir

        # NOTE(review): other methods use self.output_full_name and
        # self.output_number_name, which are not assigned in the code visible
        # here — the assignments appear to have been lost in this listing;
        # confirm against the original file.

        self.cfg_dir = None
        self.outputCfgName = None

        # LSF variables
        self.LSF_dir = None
        self.BASH_dir = None
        self.output_LSF_name = None
        self.output_BASH_name = None

        self.lfn_list = list()
    def __del__(self):
        # Explicitly release the (potentially long) list of input LFNs.
        del self.lfn_list
420 
421  def setEOSout(self,theEOSdir):
422 
423  self.OUTDIR = theEOSdir
424 
425  def getOutputBaseName(self):
426 
427  return "PVValidation_"+self.job_name
428 
429  def getOutputBaseNameWithData(self):
430 
431  return "PVValidation_"+self.job_name+"_"+self.data
432 
433  def createTheCfgFile(self,lfn):
434 
435 
436  global CopyRights
437  # write the cfg file
438 
439  self.cfg_dir = os.path.join(self.the_dir,"cfg")
440  if not os.path.exists(self.cfg_dir):
441  os.makedirs(self.cfg_dir)
442 
443  self.outputCfgName=self.output_full_name+"_cfg.py"
444  fout=open(os.path.join(self.cfg_dir,self.outputCfgName),'w')
445 
446  template_cfg_file = os.path.join(self.the_dir,"PVValidation_T_cfg.py")
447 
448  fin = open(template_cfg_file)
449 
450  config_txt = '\n\n' + CopyRights + '\n\n'
451  config_txt += fin.read()
452 
453  config_txt=config_txt.replace("ISDATEMPLATE",self.isDA)
454  config_txt=config_txt.replace("ISMCTEMPLATE",self.isMC)
455  config_txt=config_txt.replace("APPLYBOWSTEMPLATE",self.applyBOWS)
456  config_txt=config_txt.replace("EXTRACONDTEMPLATE",self.applyEXTRACOND)
457  config_txt=config_txt.replace("USEFILELISTTEMPLATE","True")
458  config_txt=config_txt.replace("RUNBOUNDARYTEMPLATE",self.runboundary)
459  config_txt=config_txt.replace("LUMILISTTEMPLATE",self.lumilist)
460  config_txt=config_txt.replace("MAXEVENTSTEMPLATE",self.maxevents)
461  config_txt=config_txt.replace("GLOBALTAGTEMPLATE",self.gt)
462  config_txt=config_txt.replace("ALLFROMGTTEMPLATE",self.allFromGT)
463  config_txt=config_txt.replace("ALIGNOBJTEMPLATE",self.alignmentDB)
464  config_txt=config_txt.replace("GEOMTAGTEMPLATE",self.alignmentTAG)
465  config_txt=config_txt.replace("APEOBJTEMPLATE",self.apeDB)
466  config_txt=config_txt.replace("ERRORTAGTEMPLATE",self.apeTAG)
467  config_txt=config_txt.replace("BOWSOBJECTTEMPLATE",self.bowDB)
468  config_txt=config_txt.replace("BOWSTAGTEMPLATE",self.bowTAG)
469  config_txt=config_txt.replace("VERTEXTYPETEMPLATE",self.vertextype)
470  config_txt=config_txt.replace("TRACKTYPETEMPLATE",self.tracktype)
471  config_txt=config_txt.replace("REFITTERTEMPLATE",self.refittertype)
472  config_txt=config_txt.replace("TTRHBUILDERTEMPLATE",self.ttrhtype)
473  config_txt=config_txt.replace("PTCUTTEMPLATE",self.ptcut)
474  config_txt=config_txt.replace("INTLUMITEMPLATE",self.intlumi)
475  config_txt=config_txt.replace("RUNCONTROLTEMPLATE",self.applyruncontrol)
476  lfn_with_quotes = map(lambda x: "\'"+x+"\'",lfn)
477  config_txt=config_txt.replace("FILESOURCETEMPLATE","["+",".join(lfn_with_quotes)+"]")
478  config_txt=config_txt.replace("OUTFILETEMPLATE",self.output_full_name+".root")
479 
480  fout.write(config_txt)
481 
482  for line in fin.readlines():
483 
484  if 'END OF EXTRA CONDITIONS' in line:
485  for element in self.extraCondVect :
486  if("Rcd" in element):
487  params = self.extraCondVect[element].split(',')
488 
489  fout.write(" \n")
490  fout.write(" process.conditionsIn"+element+"= CalibTracker.Configuration.Common.PoolDBESSource_cfi.poolDBESSource.clone( \n")
491  fout.write(" connect = cms.string('"+params[0]+"'), \n")
492  fout.write(" toGet = cms.VPSet(cms.PSet(record = cms.string('"+element+"'), \n")
493  fout.write(" tag = cms.string('"+params[1]+"'), \n")
494  if (len(params)>2):
495  fout.write(" label = cms.untracked.string('"+params[2]+"') \n")
496  fout.write(" ) \n")
497  fout.write(" ) \n")
498  fout.write(" ) \n")
499  fout.write(" process.prefer_conditionsIn"+element+" = cms.ESPrefer(\"PoolDBESSource\", \"conditionsIn"+element[0]+"\") \n \n")
500  fout.write(line)
501  fout.close()
502 
503  def createTheLSFFile(self):
504 
505 
506  # directory to store the LSF to be submitted
507  self.LSF_dir = os.path.join(self.the_dir,"LSF")
508  if not os.path.exists(self.LSF_dir):
509  os.makedirs(self.LSF_dir)
510 
511  self.output_LSF_name=self.output_full_name+".lsf"
512  fout=open(os.path.join(self.LSF_dir,self.output_LSF_name),'w')
513 
514  job_name = self.output_full_name
515 
516  log_dir = os.path.join(self.the_dir,"log")
517  if not os.path.exists(log_dir):
518  os.makedirs(log_dir)
519 
520  fout.write("#!/bin/sh \n")
521  fout.write("#BSUB -L /bin/sh\n")
522  fout.write("#BSUB -J "+job_name+"\n")
523  fout.write("#BSUB -o "+os.path.join(log_dir,job_name+".log")+"\n")
524  fout.write("#BSUB -q cmscaf1nd \n")
525  fout.write("JobName="+job_name+" \n")
526  fout.write("OUT_DIR="+self.OUTDIR+" \n")
527  fout.write("LXBATCH_DIR=`pwd` \n")
528  fout.write("cd "+os.path.join(self.CMSSW_dir,"src")+" \n")
529  fout.write("eval `scram runtime -sh` \n")
530  fout.write("cd $LXBATCH_DIR \n")
531  fout.write("cmsRun "+os.path.join(self.cfg_dir,self.outputCfgName)+" \n")
532  fout.write("ls -lh . \n")
533  fout.write("for RootOutputFile in $(ls *root ); do xrdcp -f ${RootOutputFile} root://eoscms//eos/cms${OUT_DIR}/${RootOutputFile} ; done \n")
534  fout.write("for TxtOutputFile in $(ls *txt ); do xrdcp -f ${TxtOutputFile} root://eoscms//eos/cms${OUT_DIR}/${TxtOutputFile} ; done \n")
535 
536  fout.close()
537 
538 
539  def createTheBashFile(self):
540 
541 
542  # directory to store the BASH to be submitted
543  self.BASH_dir = os.path.join(self.the_dir,"BASH")
544  if not os.path.exists(self.BASH_dir):
545  os.makedirs(self.BASH_dir)
546 
547  self.output_BASH_name=self.output_number_name+".sh"
548  fout=open(os.path.join(self.BASH_dir,self.output_BASH_name),'w')
549 
550  job_name = self.output_full_name
551 
552  log_dir = os.path.join(self.the_dir,"log")
553  if not os.path.exists(log_dir):
554  os.makedirs(log_dir)
555 
556  fout.write("#!/bin/bash \n")
557  #fout.write("export EOS_MGM_URL=root://eoscms.cern.ch \n")
558  fout.write("JobName="+job_name+" \n")
559  fout.write("echo \"Job started at \" `date` \n")
560  fout.write("CMSSW_DIR="+os.path.join(self.CMSSW_dir,"src")+" \n")
561  fout.write("export X509_USER_PROXY=$CMSSW_DIR/Alignment/OfflineValidation/test/.user_proxy \n")
562  fout.write("OUT_DIR="+self.OUTDIR+" \n")
563  fout.write("LXBATCH_DIR=$PWD \n")
564  #fout.write("cd "+os.path.join(self.CMSSW_dir,"src")+" \n")
565  fout.write("cd ${CMSSW_DIR} \n")
566  fout.write("eval `scramv1 runtime -sh` \n")
567  fout.write("echo \"batch dir: $LXBATCH_DIR release: $CMSSW_DIR release base: $CMSSW_RELEASE_BASE\" \n")
568  fout.write("cd $LXBATCH_DIR \n")
569  fout.write("cp "+os.path.join(self.cfg_dir,self.outputCfgName)+" . \n")
570  fout.write("echo \"cmsRun "+self.outputCfgName+"\" \n")
571  fout.write("cmsRun "+self.outputCfgName+" \n")
572  fout.write("echo \"Content of working dir is \"`ls -lh` \n")
573  #fout.write("less condor_exec.exe \n")
574  fout.write("for RootOutputFile in $(ls *root ); do xrdcp -f ${RootOutputFile} root://eoscms//eos/cms${OUT_DIR}/${RootOutputFile} ; done \n")
575  #fout.write("mv ${JobName}.out ${CMSSW_DIR}/BASH \n")
576  fout.write("echo \"Job ended at \" `date` \n")
577  fout.write("exit 0 \n")
578 
579  fout.close()
580 
581  def getOutputFileName(self):
582 
583  return os.path.join(self.OUTDIR,self.output_full_name+".root")
584 
585  def submit(self):
586 
587  print("submit job", self.job_id)
588  job_name = self.output_full_name
589  submitcommand1 = "chmod u+x " + os.path.join(self.LSF_dir,self.output_LSF_name)
590  child1 = os.system(submitcommand1)
591  #submitcommand2 = "bsub < "+os.path.join(self.LSF_dir,self.output_LSF_name)
592  #child2 = os.system(submitcommand2)
593  self.batch_job_id = getCommandOutput("bsub < "+os.path.join(self.LSF_dir,self.output_LSF_name))
594 
595  def getBatchjobId(self):
596 
597  return self.batch_job_id.split("<")[1].split(">")[0]
598 
599 
600 def main():
601 
602 
603 
604  if not check_proxy():
605  print("Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
606  sys.exit(1)
607 
608 
609  forward_proxy(".")
610 
611  global CopyRights
612  print('\n'+CopyRights)
613 
614  HOME = os.environ.get('HOME')
615 
616  # CMSSW section
617  input_CMSSW_BASE = os.environ.get('CMSSW_BASE')
618  AnalysisStep_dir = os.path.join(input_CMSSW_BASE,"src/Alignment/OfflineValidation/test")
619  lib_path = os.path.abspath(AnalysisStep_dir)
620  sys.path.append(lib_path)
621 
622 
623  srcFiles = []
624 
625  desc="""This is a description of %prog."""
626  parser = OptionParser(description=desc,version='%prog version 0.1')
627  parser.add_option('-s','--submit', help='job submitted', dest='submit', action='store_true', default=False)
628  parser.add_option('-j','--jobname', help='task name', dest='taskname', action='store', default='myTask')
629  parser.add_option('-D','--dataset', help='selected dataset', dest='data', action='store', default='')
630  parser.add_option('-r','--doRunBased',help='selected dataset', dest='doRunBased', action='store_true' , default=False)
631  parser.add_option('-i','--input', help='set input configuration (overrides default)', dest='inputconfig',action='store',default=None)
632  parser.add_option('-b','--begin', help='starting point', dest='start', action='store', default='1')
633  parser.add_option('-e','--end', help='ending point', dest='end', action='store', default='999999')
634  parser.add_option('-v','--verbose', help='verbose output', dest='verbose', action='store_true', default=False)
635  parser.add_option('-u','--unitTest', help='unit tests?', dest='isUnitTest', action='store_true', default=False)
636  parser.add_option('-I','--instance', help='DAS instance to use', dest='instance', action='store', default=None)
637  (opts, args) = parser.parse_args()
638 
639  now = datetime.datetime.now()
640  #t = now.strftime("test_%Y_%m_%d_%H_%M_%S_")
641  #t = "2016UltraLegacy"
642  #t = "2017UltraLegacy"
643  #t = "2018UltraLegacy"
644  t=""
645  t+=opts.taskname
646 
647  USER = os.environ.get('USER')
648  eosdir=os.path.join("/store/group/alca_trackeralign",USER,"test_out",t)
649 
650  if opts.submit:
651  mkdir_eos(eosdir)
652  else:
653  print("Not going to create EOS folder. -s option has not been chosen")
654 
655 
656 
657  jobName = []
658  isMC = []
659  isDA = []
660  doRunBased = []
661  maxevents = []
662 
663  gt = []
664  allFromGT = []
665  applyEXTRACOND = []
666  extraCondVect = []
667  alignmentDB = []
668  alignmentTAG = []
669  apeDB = []
670  apeTAG = []
671  applyBOWS = []
672  bowDB = []
673  bowTAG = []
674  conditions = []
675 
676  vertextype = []
677  tracktype = []
678  refittertype = []
679  ttrhtype = []
680 
681  applyruncontrol = []
682  ptcut = []
683  runboundary = []
684  lumilist = []
685 
686  ConfigFile = opts.inputconfig
687 
688  if ConfigFile is not None:
689 
690  print("********************************************************")
691  print("* Parsing from input file:", ConfigFile," ")
692 
694  config.read(ConfigFile)
695 
696  print("Parsed the following configuration \n\n")
697  inputDict = as_dict(config)
698  pprint.pprint(inputDict)
699 
700  if(not bool(inputDict)):
701  raise SystemExit("\n\n ERROR! Could not parse any input file, perhaps you are submitting this from the wrong folder? \n\n")
702 
703  #print config.sections()
704 
705  # please notice: since in principle one wants to run on several different samples simultaneously,
706  # all these inputs are vectors
707 
708  doRunBased = opts.doRunBased
709 
710  listOfValidations = config.getResultingSection("validations")
711 
712  for item in listOfValidations:
713  if (bool(listOfValidations[item]) == True):
714 
715  jobName.append(ConfigSectionMap(config,"Conditions:"+item)['jobname'])
716  isDA.append(ConfigSectionMap(config,"Job")['isda'])
717  isMC.append(ConfigSectionMap(config,"Job")['ismc'])
718  maxevents.append(ConfigSectionMap(config,"Job")['maxevents'])
719 
720  gt.append(ConfigSectionMap(config,"Conditions:"+item)['gt'])
721  allFromGT.append(ConfigSectionMap(config,"Conditions:"+item)['allFromGT'])
722  applyEXTRACOND.append(ConfigSectionMap(config,"Conditions:"+item)['applyextracond'])
723  conditions.append(config.getResultingSection("ExtraConditions"))
724 
725  alignmentDB.append(ConfigSectionMap(config,"Conditions:"+item)['alignmentdb'])
726  alignmentTAG.append(ConfigSectionMap(config,"Conditions:"+item)['alignmenttag'])
727  apeDB.append(ConfigSectionMap(config,"Conditions:"+item)['apedb'])
728  apeTAG.append(ConfigSectionMap(config,"Conditions:"+item)['apetag'])
729  applyBOWS.append(ConfigSectionMap(config,"Conditions:"+item)['applybows'])
730  bowDB.append(ConfigSectionMap(config,"Conditions:"+item)['bowdb'])
731  bowTAG.append(ConfigSectionMap(config,"Conditions:"+item)['bowtag'])
732 
733  vertextype.append(ConfigSectionMap(config,"Type")['vertextype'])
734  tracktype.append(ConfigSectionMap(config,"Type")['tracktype'])
735 
736 
737 
738  if(config.exists("Refit","refittertype")):
739  refittertype.append(ConfigSectionMap(config,"Refit")['refittertype'])
740  else:
741  refittertype.append(str(RefitType.COMMON))
742 
743  if(config.exists("Refit","ttrhtype")):
744  ttrhtype.append(ConfigSectionMap(config,"Refit")['ttrhtype'])
745  else:
746  ttrhtype.append("WithAngleAndTemplate")
747 
748  applyruncontrol.append(ConfigSectionMap(config,"Selection")['applyruncontrol'])
749  ptcut.append(ConfigSectionMap(config,"Selection")['ptcut'])
750  runboundary.append(ConfigSectionMap(config,"Selection")['runboundary'])
751  lumilist.append(ConfigSectionMap(config,"Selection")['lumilist'])
752  else :
753 
754  print("********************************************************")
755  print("* Parsing from command line *")
756  print("********************************************************")
757 
758  jobName = ['testing']
759  isDA = ['True']
760  isMC = ['True']
761  doRunBased = opts.doRunBased
762  maxevents = ['10000']
763 
764  gt = ['74X_dataRun2_Prompt_v4']
765  allFromGT = ['False']
766  applyEXTRACOND = ['False']
767  conditions = [[('SiPixelTemplateDBObjectRcd','frontier://FrontierProd/CMS_CONDITIONS','SiPixelTemplateDBObject_38T_2015_v3_hltvalidation')]]
768  alignmentDB = ['frontier://FrontierProd/CMS_CONDITIONS']
769  alignmentTAG = ['TrackerAlignment_Prompt']
770  apeDB = ['frontier://FrontierProd/CMS_CONDITIONS']
771  apeTAG = ['TrackerAlignmentExtendedErr_2009_v2_express_IOVs']
772  applyBOWS = ['True']
773  bowDB = ['frontier://FrontierProd/CMS_CONDITIONS']
774  bowTAG = ['TrackerSurafceDeformations_v1_express']
775 
776  vertextype = ['offlinePrimaryVertices']
777  tracktype = ['ALCARECOTkAlMinBias']
778 
779  applyruncontrol = ['False']
780  ptcut = ['3']
781  runboundary = ['1']
782  lumilist = ['']
783 
784  # print some of the configuration
785 
786  print("********************************************************")
787  print("* Configuration info *")
788  print("********************************************************")
789  print("- submitted : ",opts.submit)
790  print("- taskname : ",opts.taskname)
791  print("- Jobname : ",jobName)
792  print("- use DA : ",isDA)
793  print("- is MC : ",isMC)
794  print("- is run-based: ",doRunBased)
795  print("- evts/job : ",maxevents)
796  print("- GlobatTag : ",gt)
797  print("- allFromGT? : ",allFromGT)
798  print("- extraCond? : ",applyEXTRACOND)
799  print("- extraCond : ",conditions)
800  print("- Align db : ",alignmentDB)
801  print("- Align tag : ",alignmentTAG)
802  print("- APE db : ",apeDB)
803  print("- APE tag : ",apeTAG)
804  print("- use bows? : ",applyBOWS)
805  print("- K&B db : ",bowDB)
806  print("- K&B tag : ",bowTAG)
807  print("- VertexColl : ",vertextype)
808  print("- TrackColl : ",tracktype)
809  print("- RefitterSeq : ",refittertype)
810  print("- TTRHBuilder : ",ttrhtype)
811  print("- RunControl? : ",applyruncontrol)
812  print("- Pt> ",ptcut)
813  print("- run= ",runboundary)
814  print("- JSON : ",lumilist)
815  print("- Out Dir : ",eosdir)
816 
817  print("********************************************************")
818  print("Will run on",len(jobName),"workflows")
819 
820  myRuns = []
821  mylist = {}
822 
823  if(doRunBased):
824  print(">>>> This is Data!")
825  print(">>>> Doing run based selection")
826  cmd = 'dasgoclient -limit=0 -query \'run dataset='+opts.data + (' instance='+opts.instance+'\'' if (opts.instance is not None) else '\'')
827  p = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
828  out, err = p.communicate()
829  #print(out)
830  listOfRuns=out.decode().split("\n")
831  listOfRuns.pop()
832  listOfRuns.sort()
833  print("Will run on ",len(listOfRuns),"runs: \n",listOfRuns)
834 
835  mytuple=[]
836 
837  print("first run:",opts.start,"last run:",opts.end)
838 
839  for run in listOfRuns:
840  if (int(run)<int(opts.start) or int(run)>int(opts.end)):
841  print("excluding",run)
842  continue
843 
844  if not isInJSON(run,lumilist[0]):
845  continue
846 
847  else:
848  print("'======> taking",run)
849  #print "preparing run",run
850  #if(int(run)%100==0):
851  mytuple.append((run,opts.data))
852 
853  #print mytuple
854 
855  instances=[opts.instance for entry in mytuple]
856  pool = multiprocessing.Pool(processes=20) # start 20 worker processes
857  count = pool.map(getFilesForRun,zip(mytuple,instances))
858  file_info = dict(zip(listOfRuns, count))
859 
860  #print file_info
861 
862  for run in listOfRuns:
863  if (int(run)<int(opts.start) or int(run)>int(opts.end)):
864  print('rejecting run',run,' becasue outside of boundaries')
865  continue
866 
867  if not isInJSON(run,lumilist[0]):
868  print('rejecting run',run,' becasue outside not in JSON')
869  continue
870 
871  #if(int(run)%100==0):
872  # print "preparing run",run
873  myRuns.append(run)
874  #cmd2 = ' das_client --limit=0 --query \'file run='+run+' dataset='+opts.data+'\''
875  #q = Popen(cmd2 , shell=True, stdout=PIPE, stderr=PIPE)
876  #out2, err2 = q.communicate()
877 
878  #out2=getFilesForRun((run,opts.data))
879  #print out2
880  #pool.map(getFilesForRun,run,opts.data)
881 
882 
883  #if run in file_info:
884  #mylist[run] = file_info[run]
885  #print run,mylist[run]
886  #mylist[run] = out2.split('\n')
887  #print mylist
888  #mylist[run].pop()
889  #print mylist
890 
891  od = collections.OrderedDict(sorted(file_info.items()))
892  # print od
893 
894 
895  if(len(myRuns)==0):
896  if(opts.isUnitTest):
897  print('\n')
898  print('=' * 70)
899  print("|| WARNING: won't run on any run, probably DAS returned an empty query,\n|| but that's fine because this is a unit test!")
900  print('=' * 70)
901  print('\n')
902  sys.exit(0)
903  else:
904  raise Exception('Will not run on any run.... please check again the configuration')
905  else:
906  # get from the DB the int luminosities
907  myLumiDB = getLuminosity(HOME,myRuns[0],myRuns[-1],doRunBased,opts.verbose)
908 
909  if(opts.verbose):
910  pprint.pprint(myLumiDB)
911 
912  # start loop on samples
913  for iConf in range(len(jobName)):
914  print("This is Task n.",iConf+1,"of",len(jobName))
915 
916 
917 
918  # for hadd script
919  scripts_dir = os.path.join(AnalysisStep_dir,"scripts")
920  if not os.path.exists(scripts_dir):
921  os.makedirs(scripts_dir)
922  hadd_script_file = os.path.join(scripts_dir,jobName[iConf]+"_"+opts.taskname+".sh")
923  fout = open(hadd_script_file,'w')
924 
925  output_file_list1=list()
926  output_file_list2=list()
927  output_file_list2.append("hadd ")
928 
929  inputFiles = []
930 
931  if (to_bool(isMC[iConf]) or (not to_bool(doRunBased))):
932  if(to_bool(isMC[iConf])):
933  print("this is MC")
934  cmd = 'dasgoclient -query \'file dataset='+opts.data+ (' instance='+opts.instance+'\'' if (opts.instance is not None) else '\'')
935  s = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
936  out,err = s.communicate()
937  mylist = out.decode().split('\n')
938  mylist.pop()
939  #print mylist
940 
941  splitList = split(mylist,10)
942  for files in splitList:
943  inputFiles.append(files)
944  myRuns.append(str(1))
945  else:
946  print("this is DATA (not doing full run-based selection)")
947  print(runboundary[iConf])
948  cmd = 'dasgoclient -query \'file dataset='+opts.data+' run='+runboundary[iConf]+ (' instance='+opts.instance+'\'' if (opts.instance is not None) else '\'')
949  #print cmd
950  s = Popen(cmd , shell=True, stdout=PIPE, stderr=PIPE)
951  out,err = s.communicate()
952  #print(out)
953  mylist = out.decode().split('\n')
954  mylist.pop()
955  #print "len(mylist):",len(mylist)
956  print("mylist:",mylist)
957 
958  splitList = split(mylist,10)
959  for files in splitList:
960  inputFiles.append(files)
961  myRuns.append(str(runboundary[iConf]))
962 
963  myLumiDB = getLuminosity(HOME,myRuns[0],myRuns[-1],True,opts.verbose)
964 
965  else:
966  #pass
967  for element in od:
968  #print mylist[element]
969  inputFiles.append(od[element])
970  #print element,od[element]
971  #print mylist
972 
973  #print inputFiles
974 
975 
976  batchJobIds = []
977  mergedFile = None
978 
979  if(opts.verbose):
980  print("myRuns =====>",myRuns)
981 
982  totalJobs=0
983  theBashDir=None
984  theBaseName=None
985 
986  for jobN,theSrcFiles in enumerate(inputFiles):
987  if(opts.verbose):
988  print("JOB:",jobN,"run",myRuns[jobN],theSrcFiles)
989  else:
990  print("JOB:",jobN,"run",myRuns[jobN])
991  thejobIndex=None
992  theLumi='1'
993 
994  #if(to_bool(isMC[iConf]) and (not to_bool(doRunBased))):
995  if(to_bool(isMC[iConf])):
996  thejobIndex=jobN
997  else:
998  if(doRunBased):
999  thejobIndex=myRuns[jobN]
1000  else:
1001  thejobIndex=myRuns[jobN]+"_"+str(jobN)
1002 
1003  if (myRuns[jobN]) in myLumiDB:
1004  theLumi = myLumiDB[myRuns[jobN]]
1005  else:
1006  print("=====> COULD NOT FIND LUMI, setting default = 1/pb")
1007  theLumi='1'
1008  print("int. lumi:",theLumi,"/pb")
1009 
1010  #print 'the configuration is:',iConf,' theJobIndex is:',thejobIndex
1011  #print applyBOWS[iConf],applyEXTRACOND[iConf],conditions[iConf]
1012 
1013  runInfo = {}
1014  runInfo["run"] = myRuns[jobN]
1015  #runInfo["runevents"] = getNEvents(myRuns[jobN],opts.data)
1016  runInfo["conf"] = jobName[iConf]
1017  runInfo["gt"] = gt[iConf]
1018  runInfo["allFromGT"] = allFromGT[iConf]
1019  runInfo["alignmentDB"] = alignmentDB[iConf]
1020  runInfo["alignmentTag"] = alignmentTAG[iConf]
1021  runInfo["apeDB"] = apeDB[iConf]
1022  runInfo["apeTag"] = apeTAG[iConf]
1023  runInfo["applyBows"] = applyBOWS[iConf]
1024  runInfo["bowDB"] = bowDB[iConf]
1025  runInfo["bowTag"] = bowTAG[iConf]
1026  runInfo["ptCut"] = ptcut[iConf]
1027  runInfo["lumilist"] = lumilist[iConf]
1028  runInfo["applyEXTRACOND"] = applyEXTRACOND[iConf]
1029  runInfo["conditions"] = conditions[iConf]
1030  runInfo["nfiles"] = len(theSrcFiles)
1031  runInfo["srcFiles"] = theSrcFiles
1032  runInfo["intLumi"] = theLumi
1033 
1034  updateDB(((iConf+1)*10)+(jobN+1),runInfo)
1035 
1036  totalJobs=totalJobs+1
1037 
1038  aJob = Job(opts.data,
1039  jobN,
1040  thejobIndex,
1041  jobName[iConf],isDA[iConf],isMC[iConf],
1042  applyBOWS[iConf],applyEXTRACOND[iConf],conditions[iConf],
1043  myRuns[jobN], lumilist[iConf], theLumi, maxevents[iConf],
1044  gt[iConf],allFromGT[iConf],
1045  alignmentDB[iConf], alignmentTAG[iConf],
1046  apeDB[iConf], apeTAG[iConf],
1047  bowDB[iConf], bowTAG[iConf],
1048  vertextype[iConf], tracktype[iConf],
1049  refittertype[iConf], ttrhtype[iConf],
1050  applyruncontrol[iConf],
1051  ptcut[iConf],input_CMSSW_BASE,AnalysisStep_dir)
1052 
1053  aJob.setEOSout(eosdir)
1054  aJob.createTheCfgFile(theSrcFiles)
1055  aJob.createTheBashFile()
1056 
1057  output_file_list1.append("xrdcp root://eoscms//eos/cms"+aJob.getOutputFileName()+" /tmp/$USER/"+opts.taskname+" \n")
1058  if jobN == 0:
1059  theBashDir=aJob.BASH_dir
1060  theBaseName=aJob.getOutputBaseNameWithData()
1061  mergedFile = "/tmp/$USER/"+opts.taskname+"/"+aJob.getOutputBaseName()+" "+opts.taskname+".root"
1062  output_file_list2.append("/tmp/$USER/"+opts.taskname+"/"+aJob.getOutputBaseName()+opts.taskname+".root ")
1063  output_file_list2.append("/tmp/$USER/"+opts.taskname+"/"+os.path.split(aJob.getOutputFileName())[1]+" ")
1064  del aJob
1065 
1066  job_submit_file = write_HTCondor_submit_file(theBashDir,theBaseName,totalJobs,None)
1067 
1068  if opts.submit:
1069  os.system("chmod u+x "+theBashDir+"/*.sh")
1070  submissionCommand = "condor_submit "+job_submit_file
1071  submissionOutput = getCommandOutput(submissionCommand)
1072  print(submissionOutput)
1073 
1074  fout.write("#!/bin/bash \n")
1075  fout.write("MAIL=$USER@mail.cern.ch \n")
1076  fout.write("OUT_DIR="+eosdir+"\n")
1077  fout.write("FILE="+str(mergedFile)+"\n")
1078  fout.write("echo $HOST | mail -s \"Harvesting job started\" $USER@mail.cern.ch \n")
1079  fout.write("cd "+os.path.join(input_CMSSW_BASE,"src")+"\n")
1080  fout.write("eval `scram r -sh` \n")
1081  fout.write("mkdir -p /tmp/$USER/"+opts.taskname+" \n")
1082  fout.writelines(output_file_list1)
1083  fout.writelines(output_file_list2)
1084  fout.write("\n")
1085  fout.write("echo \"xrdcp -f $FILE root://eoscms//eos/cms$OUT_DIR\" \n")
1086  fout.write("xrdcp -f $FILE root://eoscms//eos/cms$OUT_DIR \n")
1087  fout.write("echo \"Harvesting for "+opts.taskname+" task is complete; please find output at $OUT_DIR \" | mail -s \"Harvesting for " +opts.taskname +" completed\" $MAIL \n")
1088 
1089  os.system("chmod u+x "+hadd_script_file)
1090 
1091  harvest_conditions = '"' + " && ".join(["ended(" + jobId + ")" for jobId in batchJobIds]) + '"'
1092  print(harvest_conditions)
1093  lastJobCommand = "bsub -o harvester"+opts.taskname+".tmp -q 1nh -w "+harvest_conditions+" "+hadd_script_file
1094  print(lastJobCommand)
1095  if opts.submit:
1096  lastJobOutput = getCommandOutput(lastJobCommand)
1097  print(lastJobOutput)
1098 
1099  fout.close()
1100  del output_file_list1
1101 
1102 
# Standard script entry point: run the PV validation submission workflow
# only when this file is executed directly (not when imported as a module).
if __name__ == "__main__":
    main()
1105 
1106 
1107 
submitPVValidationJobs.Job.createTheBashFile
def createTheBashFile(self)
Definition: submitPVValidationJobs.py:538
FastTimerService_cff.range
range
Definition: FastTimerService_cff.py:34
submitPVValidationJobs.Job.getOutputBaseNameWithData
def getOutputBaseNameWithData(self)
Definition: submitPVValidationJobs.py:428
submitPVValidationJobs.Job.outputCfgName
outputCfgName
Definition: submitPVValidationJobs.py:406
submitPVValidationJobs.mkdir_eos
def mkdir_eos(out_path)
method to recursively create directories on EOS #############
Definition: submitPVValidationJobs.py:330
submitPVValidationJobs.Job.getOutputFileName
def getOutputFileName(self)
Definition: submitPVValidationJobs.py:580
runGCPTkAlMap.title
string title
Definition: runGCPTkAlMap.py:94
electrons_cff.bool
bool
Definition: electrons_cff.py:366
submitPVValidationJobs.Job.tracktype
tracktype
Definition: submitPVValidationJobs.py:393
submitPVValidationJobs.Job.CMSSW_dir
CMSSW_dir
Definition: submitPVValidationJobs.py:400
submitPVValidationJobs.Job.ttrhtype
ttrhtype
Definition: submitPVValidationJobs.py:395
submitPVValidationJobs.Job.output_BASH_name
output_BASH_name
Definition: submitPVValidationJobs.py:412
submitPVValidationJobs.Job.the_dir
the_dir
Definition: submitPVValidationJobs.py:399
submitPVValidationJobs.updateDB
def updateDB(run, runInfo)
Definition: submitPVValidationJobs.py:237
submitPVValidationJobs.isInJSON
def isInJSON(run, jsonfile)
Definition: submitPVValidationJobs.py:184
submitPVValidationJobs.BetterConfigParser.getResultingSection
def getResultingSection(self, section, defaultDict={}, demandPars=[])
Definition: submitPVValidationJobs.py:285
digitizers_cfi.strip
strip
Definition: digitizers_cfi.py:19
submitPVValidationJobs.Job.lfn_list
lfn_list
Definition: submitPVValidationJobs.py:414
join
static std::string join(char **cmd)
Definition: RemoteFile.cc:17
submitPVValidationJobs.Job.createTheCfgFile
def createTheCfgFile(self, lfn)
Definition: submitPVValidationJobs.py:432
submitPVValidationJobs.Job.submit
def submit(self)
Definition: submitPVValidationJobs.py:584
if
if(0==first)
Definition: CAHitNtupletGeneratorKernelsImpl.h:48
submitPVValidationJobs.to_bool
def to_bool(value)
Definition: submitPVValidationJobs.py:206
submitPVValidationJobs.Job.__init__
def __init__(self, dataset, job_number, job_id, job_name, isDA, isMC, applyBOWS, applyEXTRACOND, extraconditions, runboundary, lumilist, intlumi, maxevents, gt, allFromGT, alignmentDB, alignmentTAG, apeDB, apeTAG, bowDB, bowTAG, vertextype, tracktype, refittertype, ttrhtype, applyruncontrol, ptcut, CMSSW_dir, the_dir)
Definition: submitPVValidationJobs.py:364
submitPVValidationJobs.Job.getOutputBaseName
def getOutputBaseName(self)
Definition: submitPVValidationJobs.py:424
submitPVValidationJobs.Job.isMC
isMC
Definition: submitPVValidationJobs.py:376
submitPVValidationJobs.Job.output_number_name
output_number_name
Definition: submitPVValidationJobs.py:403
submitPVValidationJobs.Job.cfg_dir
cfg_dir
Definition: submitPVValidationJobs.py:405
submitPVValidationJobs.Job.LSF_dir
LSF_dir
Definition: submitPVValidationJobs.py:409
submitPVValidationJobs.updateDB2
def updateDB2()
Definition: submitPVValidationJobs.py:218
submitPVValidationJobs.main
def main()
Definition: submitPVValidationJobs.py:600
submitPVValidationJobs.Job.BASH_dir
BASH_dir
Definition: submitPVValidationJobs.py:410
submitPVValidationJobs.getNEvents
def getNEvents(run, dataset)
Definition: submitPVValidationJobs.py:139
submitPVValidationJobs.Job.isDA
isDA
Definition: submitPVValidationJobs.py:375
submitPVValidationJobs.Job.output_LSF_name
output_LSF_name
Definition: submitPVValidationJobs.py:411
submitPVValidationJobs.Job.vertextype
vertextype
Definition: submitPVValidationJobs.py:392
submitPVValidationJobs.Job.gt
gt
Definition: submitPVValidationJobs.py:384
submitPVValidationJobs.Job.extraCondVect
extraCondVect
Definition: submitPVValidationJobs.py:379
submitPVValidationJobs.split
def split(sequence, size)
Definition: submitPVValidationJobs.py:352
submitPVValidationJobs.Job.alignmentDB
alignmentDB
Definition: submitPVValidationJobs.py:386
str
#define str(s)
Definition: TestProcessor.cc:52
submitPVValidationJobs.Job.batch_job_id
batch_job_id
Definition: submitPVValidationJobs.py:372
submitPVValidationJobs.ConfigSectionMap
def ConfigSectionMap(config, section)
Definition: submitPVValidationJobs.py:316
sistrip::SpyUtilities::isValid
const bool isValid(const Frame &aFrame, const FrameQuality &aQuality, const uint16_t aExpectedPos)
Definition: SiStripSpyUtilities.cc:124
submitPVValidationJobs.BetterConfigParser.optionxform
def optionxform(self, optionstr)
Definition: submitPVValidationJobs.py:255
submitPVValidationJobs.getLuminosity
def getLuminosity(homedir, minRun, maxRun, isRunBased, verbose)
Definition: submitPVValidationJobs.py:145
submitPVValidationJobs.Job.output_full_name
output_full_name
Definition: submitPVValidationJobs.py:402
submitPVValidationJobs.Job.job_id
job_id
Definition: submitPVValidationJobs.py:371
print
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:46
submitPVValidationJobs.Job.bowTAG
bowTAG
Definition: submitPVValidationJobs.py:391
Exception
submitPVValidationJobs.BetterConfigParser.__updateDict
def __updateDict(self, dictionary, section)
Definition: submitPVValidationJobs.py:270
submitPVValidationJobs.Job.ptcut
ptcut
Definition: submitPVValidationJobs.py:397
createfilelist.int
int
Definition: createfilelist.py:10
submitPVValidationJobs.Job.job_number
job_number
Definition: submitPVValidationJobs.py:370
submitPVValidationJobs.Job.getBatchjobId
def getBatchjobId(self)
Definition: submitPVValidationJobs.py:594
submitPVValidationJobs.Job.alignmentTAG
alignmentTAG
Definition: submitPVValidationJobs.py:387
submitPVValidationJobs.check_proxy
def check_proxy()
Definition: submitPVValidationJobs.py:48
submitPVValidationJobs.Job.maxevents
maxevents
Definition: submitPVValidationJobs.py:383
submitPVValidationJobs.Job.applyruncontrol
applyruncontrol
Definition: submitPVValidationJobs.py:396
main
Definition: main.py:1
ComparisonHelper::zip
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
Definition: L1TStage2CaloLayer1.h:41
submitPVValidationJobs.Job.setEOSout
def setEOSout(self, theEOSdir)
Definition: submitPVValidationJobs.py:420
submitPVValidationJobs.RefitType
Definition: submitPVValidationJobs.py:37
submitPVValidationJobs.Job.allFromGT
allFromGT
Definition: submitPVValidationJobs.py:385
submitPVValidationJobs.Job
aux generator function to split lists based on http://sandrotosi.blogspot.com/2011/04/python-group-li...
Definition: submitPVValidationJobs.py:362
submitPVValidationJobs.Job.applyBOWS
applyBOWS
Definition: submitPVValidationJobs.py:377
submitPVValidationJobs.Job.OUTDIR
OUTDIR
Definition: submitPVValidationJobs.py:422
submitPVValidationJobs.BetterConfigParser.exists
def exists(self, section, option)
Definition: submitPVValidationJobs.py:259
submitPVValidationJobs.Job.bowDB
bowDB
Definition: submitPVValidationJobs.py:390
submitPVValidationJobs.forward_proxy
def forward_proxy(rundir)
Definition: submitPVValidationJobs.py:61
format
submitPVValidationJobs.as_dict
def as_dict(config)
Definition: submitPVValidationJobs.py:195
submitPVValidationJobs.BetterConfigParser
— Classes —############################
Definition: submitPVValidationJobs.py:252
submitPVValidationJobs.getFilesForRun
def getFilesForRun(blob)
Definition: submitPVValidationJobs.py:127
submitPVValidationJobs.getCommandOutput
def getCommandOutput(command)
Definition: submitPVValidationJobs.py:113
submitPVValidationJobs.write_HTCondor_submit_file
def write_HTCondor_submit_file(path, name, nruns, proxy_path=None)
Definition: submitPVValidationJobs.py:76
submitPVValidationJobs.Job.data
data
Definition: submitPVValidationJobs.py:369
genParticles_cff.map
map
Definition: genParticles_cff.py:11
submitPVValidationJobs.Job.__del__
def __del__(self)
Definition: submitPVValidationJobs.py:416
submitPVValidationJobs.Job.lumilist
lumilist
Definition: submitPVValidationJobs.py:381
submitPVValidationJobs.Job.applyEXTRACOND
applyEXTRACOND
Definition: submitPVValidationJobs.py:378
submitPVValidationJobs.Job.apeDB
apeDB
Definition: submitPVValidationJobs.py:388
submitPVValidationJobs.Job.runboundary
runboundary
Definition: submitPVValidationJobs.py:380
submitPVValidationJobs.Job.job_name
job_name
Definition: submitPVValidationJobs.py:373
submitPVValidationJobs.Job.refittertype
refittertype
Definition: submitPVValidationJobs.py:394
submitPVValidationJobs.Job.createTheLSFFile
def createTheLSFFile(self)
Definition: submitPVValidationJobs.py:502
submitPVValidationJobs.Job.intlumi
intlumi
Definition: submitPVValidationJobs.py:382
submitPVValidationJobs.Job.apeTAG
apeTAG
Definition: submitPVValidationJobs.py:389
python.rootplot.root2matplotlib.replace
def replace(string, replacements)
Definition: root2matplotlib.py:444