/data/refman/pasoursint/CMSSW_5_2_9/src/RecoVertex/BeamSpotProducer/scripts/createPayload.py

#!/usr/bin/env python
#____________________________________________________________
#
#  createPayload
#
# A very simple way to create condition DB payloads
#
# Francisco Yumiceva
# yumiceva@fnal.gov
#
# Fermilab, 2009
#
#____________________________________________________________

"""
   createPayload.py

   A very simple script to handle payloads for beam spot results

   usage: %prog -d <data file/directory> -t <tag name>
   -c, --copy   : Only copy files from input directory to test/workflow/files/
   -d, --data   = DATA: Data file, or directory with data files.
   -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
   -o, --overwrite : Overwrite results files when copying.
   -O, --Output = OUTPUT: Output directory for data files (workflow directory)
   -m, --merged : Use when data file contains combined results.
   -n, --newarchive : Create a new archive directory when copying.
   -t, --tag    = TAG: Database tag name.
   -T, --Test   : Upload files to Test dropbox for data validation.
   -u, --upload : Upload files to offline drop box via scp.
   -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.

   Francisco Yumiceva (yumiceva@fnal.gov)
   Fermilab 2010

"""
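# Example invocation (the file and tag names below are hypothetical):
#   createPayload.py -d BeamFitResults_Run123456.txt -t BeamSpotObjects_2009_v1_express -I lumibase -u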


import sys,os
import commands, re, time
import datetime
from CommonMethods import *
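# Note: helper functions used below but not defined in this file (parse, pack,
# writeSqliteFile, readSqliteFile, appendSqliteFile, uploadSqliteFile) are
# presumably provided by the CommonMethods star import above.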

workflowdir             = 'test/workflow/'
workflowdirLastPayloads = workflowdir + 'lastPayloads/'
workflowdirTmp          = workflowdir + 'tmp/'
workflowdirArchive      = workflowdir + 'archive/'
optionstring            = ''
tagType                 = ''
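# Expected layout of the workflow area (created and cleaned by mkWorkflowdir):
#   archive/      : copies of the input data files plus the merged payloads
#   tmp/          : scratch space for per-IOV sqlite and text files
#   lastPayloads/ : sqlite payload and metadata card from the latest run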

def copyToWorkflowdir(path):
    global workflowdirArchive
    lsCommand      = ''
    cpCommand      = ''
    listoffiles    = []
    tmplistoffiles = []
    if path.find('castor') != -1:
        print "Getting files from castor ..."
        lsCommand = 'ns'
        cpCommand = 'rf'
    elif not os.path.exists(path):
        exit("ERROR: File or directory " + path + " doesn't exist")

    if path[len(path)-4:len(path)] != '.txt':
        if path[len(path)-1] != '/':
            path = path + '/'

        aCommand  = lsCommand  + 'ls '+ path + " | grep .txt"

        tmpstatus = commands.getstatusoutput( aCommand )
        tmplistoffiles = tmpstatus[1].split('\n')
        if len(tmplistoffiles) == 1:
            if tmplistoffiles[0] == '':
                exit('ERROR: No files found in directory ' + path)
            if tmplistoffiles[0].find('No such file or directory') != -1:
                exit("ERROR: File or directory " + path + " doesn't exist")

    else:
        tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
        path = path[0:path.rfind('/')+1]


    archiveName = path
    if path == './':
        archiveName = os.getcwd() + '/'
        archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
    if path[:len(path)-1].rfind('/') != -1:
        archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]

    workflowdirArchive = workflowdirArchive + archiveName
    if tagType != '':
        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
    elif option.newarchive:
#        tmpTime = str(datetime.datetime.now())
#        tmpTime = tmpTime.replace(' ','-')
#        tmpTime = tmpTime.replace('.','-')
#        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tmpTime + '/'
#        os.mkdir(workflowdirArchive)
        for n in range(1,100000):
            tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
            if not os.path.isdir(tryDir):
                workflowdirArchive = tryDir
                os.mkdir(workflowdirArchive)
                break
            elif n == 100000-1:
                exit('ERROR: Unbelievable! Do you ever clean ' + workflowdir + '? You need to remove some of its directories!')

    for ifile in tmplistoffiles:
        if ifile.find('.txt') != -1:
            if os.path.isfile(workflowdirArchive+"/"+ifile):
                if option.overwrite:
                    print "File " + ifile + " already exists in destination. We will overwrite it."
                else:
                    print "File " + ifile + " already exists in destination. Keep original file."
                    listoffiles.append( workflowdirArchive + ifile )
                    continue
            listoffiles.append( workflowdirArchive + ifile )
            # copy to local disk
            aCommand = cpCommand + 'cp '+ path + ifile + " " + workflowdirArchive
            print " >> " + aCommand
            tmpstatus = commands.getstatusoutput( aCommand )
    return listoffiles

def mkWorkflowdir():
    global workflowdir
    global workflowdirLastPayloads
    global workflowdirTmp
    global workflowdirArchive
    if not os.path.isdir(workflowdir):
        print "Making " + workflowdir + " directory..."
        os.mkdir(workflowdir)

    if not os.path.isdir(workflowdirLastPayloads):
        os.mkdir(workflowdirLastPayloads)
    else:
        os.system("rm -f "+ workflowdirLastPayloads + "*")

    if not os.path.isdir(workflowdirTmp):
        os.mkdir(workflowdirTmp)
    else:
        os.system("rm -f "+ workflowdirTmp + "*")

    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)

###############################################################################################
if __name__ == '__main__':
    #if len(sys.argv) < 2:
#       print "\n [usage] createPayload <beamspot file> <tag name> <IOV since> <IOV till=-1=inf> <IOV comment> <destDB=oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT>"
        #print " e.g. createPayload BeamFitResults_template.txt BeamSpotObjects_2009_v1_express 122745 \"\" \"beam spot for early collisions\" \"oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT\"\n"
        #sys.exit()


    # COMMAND LINE OPTIONS
    #################################
    option,args = parse(__doc__)
    if not args and not option: exit()

    workflowdir             = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp          = workflowdir + "tmp/"
    workflowdirArchive      = workflowdir + "archive/"

    if ( (option.data and option.tag) or (option.data and option.copy)):
        mkWorkflowdir()

    if not option.data:
        print "ERROR: You must provide a data file or a directory with data files"
        exit()

    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied to " + workflowdirArchive)

    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1:
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print "I am assuming your tag is for the offline database..."
            tagType = "offline"

    else:
        print "ERROR: You must provide the database tag name"
        exit()

    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print "\n\n Unknown IOV base option: "+ option.IOVbase +" \n\n\n"
            exit()
        IOVbase = option.IOVbase

    listoffiles = copyToWorkflowdir(option.data)
    # sort list of data files in chronological order
    sortedlist = {}

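    # Key the dictionary so that sorting the keys gives chronological order:
    #  - with --merged, the single input file is split into fixed 23-line
    #    blocks (one per fit), keyed by the second field of each block's first line;
    #  - otherwise each file is keyed by pack(run, lumi), which presumably
    #    packs the run number and last lumi section into one 64-bit value
    #    (run in the upper 32 bits), i.e. the usual CMSSW lumiid convention.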
    for beam_file in listoffiles:

        if len(listoffiles)==1 and option.merged:
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            npayloads = len(alllines)/23
            for i in range(0,npayloads):
                block = alllines[i * 23: (i+1)*23]
                #line = block[2]
                #atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
                line = block[0]
                atime = line.split()[1]
                sortedlist[atime] = block
            break

        tmpfile = open(beam_file)
        atime = ''
        arun = ''
        alumis = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumis = line.split()[3]
            if line.find('Type') != -1 and line.split()[1] == '0':
                skip = True
        if skip:
            print " zero fit result, skipping file " + beam_file
            print " run " + arun + " lumis " + alumis
        else:
            sortedlist[int(pack(int(arun), int(alumis)))] = beam_file

        tmpfile.close()

    keys = sortedlist.keys()
    keys.sort()

    # write combined data file
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"
#    if os.path.isfile(allbeam_file):

    allfile = open( allbeam_file, 'a')
    print " merging all results into file: " + allbeam_file

    # check if merged sqlite file exists
    if os.path.exists(workflowdirArchive+"payloads/Combined.db"):
        os.system("rm "+workflowdirArchive+"payloads/Combined.db")


    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'
    for key in keys:

        iov_since = '1'
        iov_till = ''

        suffix = "_" + str(nfile)
        writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
        readdb_template  = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
        sqlite_file_name = tagname + suffix
        sqlite_file   = workflowdirTmp + sqlite_file_name + '.db'
        metadata_file = workflowdirTmp + sqlite_file_name + '.txt'
        nfile = nfile + 1

        #### WRITE sqlite file

        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp+"tmp_datafile.txt"
        if option.merged:
            tmpfile = open(tmp_datafilename,'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename

        print "read input beamspot file: " + beam_file
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp,"w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            allfile.write(line)

        # pack run number and lumi section
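        # For lumibase IOVs the payload's since/till span the lumi range of
        # the fit: the run comes from the Runnumber line and the first/last
        # lumi section from the LumiRange line parsed above.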
        if IOVbase == "lumibase":
            timetype = "lumiid"
            iov_since = str( pack(int(tmp_run), int(tmp_lumi_since)) )
            iov_till = str( pack(int(tmp_run), int(tmp_lumi_till)) )
        # keep first iov for merged output metafile
        if nfile == 1:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        if option.copy:
            continue

        beam_file = beam_file_tmp

        if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
            print "An error occurred while writing the sqlite file: " + sqlite_file

        commands.getstatusoutput('rm -f ' + beam_file)
        ##### READ and check sqlite file
        readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)

        #### Merge sqlite files
        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print " merge sqlite file ..."
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till, workflowdirTmp)

        # keep last payload for express, and prompt tags
        if nfile == total_files:
            print " this is the last IOV. You can use this payload for express and prompt conditions."
            os.system("cp "+sqlite_file+ " "+workflowdirArchive+"payloads/express.db")
            print "a copy of this payload has been placed at:"
            print workflowdirArchive+"payloads/express.db"

        # clean up
        os.system("rm "+ sqlite_file)
        print " clean up done."

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    allfile.close()

    #### CREATE payload for merged output

    print " create MERGED payload card for dropbox ..."

    sqlite_file   = workflowdirArchive+'payloads/Combined.db'
    metadata_file = workflowdirArchive+'payloads/Combined.txt'
    dfile = open(metadata_file,'w')

    dfile.write('destDB '+ destDB +'\n')
    dfile.write('tag '+ tagname +'\n')
    dfile.write('inputtag' +'\n')
    dfile.write('since ' + iov_since_first +'\n')
    #        dfile.write('till ' + iov_till +'\n')
    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment +'\n')

    dfile.close()
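    # The metadata card written above is a plain-text file for the dropbox;
    # its contents look roughly like this (values are illustrative only):
    #   destDB oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT
    #   tag BeamSpotObjects_2009_v1_express
    #   inputtag
    #   since 124120
    #   Timetype runnumber
    #   IOVCheck hlt
    #   usertext Beam spot position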

    uuid = commands.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    commands.getstatusoutput('cp ' + sqlite_file   + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    commands.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    commands.getstatusoutput('mv ' + sqlite_file   + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    commands.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print workflowdirLastPayloads + final_sqlite_file_name + '.db'
    print workflowdirLastPayloads + final_sqlite_file_name + '.txt'

    if option.upload:
        print " scp files to offline Drop Box"
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"

        uploadSqliteFile(workflowdirLastPayloads,final_sqlite_file_name,dropbox)