"""
   A very simple script to handle payload for beam spot results

   usage: %prog -d <data file/directory> -t <tag name>
   -c, --copy : Only copy files from input directory to test/workflow/files/
   -d, --data = DATA: Data file, or directory with data files.
   -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
   -o, --overwrite : Overwrite results files when copying.
   -O, --Output = OUTPUT: Output directory for data files (workflow directory)
   -m, --merged : Use when data file contains combined results.
   -n, --newarchive : Create a new archive directory when copying.
   -t, --tag = TAG: Database tag name.
   -T, --Test : Upload files to Test dropbox for data validation.
   -u, --upload : Upload files to offline drop box via scp.
   -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.

   Francisco Yumiceva (yumiceva@fnal.gov)
"""
from __future__ import print_function

from builtins import range
import os
import subprocess, re, time
from CommonMethods import *
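# Example invocation (a sketch; the data directory and tag name below are
# hypothetical, not taken from this script):
#
#   %prog -d BeamFitResults/ -t BeamSpotObjects_SomeTag_offline -I lumibase -u
#
# -d points at the fit-result .txt files, -t names the database tag (its
# substring selects offline/prompt/express/hlt handling), and -u uploads the
# final payload to the offline dropbox.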
workflowdir             = 'test/workflow/'
workflowdirLastPayloads = workflowdir + 'lastPayloads/'
workflowdirTmp          = workflowdir + 'tmp/'
workflowdirArchive      = workflowdir + 'archive/'
tagType                 = ''

def copyToWorkflowdir(path):
    global workflowdirArchive
    lsCommand      = ''
    cpCommand      = ''
    listoffiles    = []
    tmplistoffiles = []
    if path.find('castor') != -1:
        print("Getting files from castor ...")
        lsCommand = 'ns'   # nsls
        cpCommand = 'rf'   # rfcp
    elif not os.path.exists(path):
        exit("ERROR: File or directory " + path + " doesn't exist")

    if path[len(path)-4:len(path)] != '.txt':
        if path[len(path)-1] != '/':
            path = path + '/'

        aCommand = lsCommand + 'ls ' + path + " | grep .txt"

        tmpstatus = subprocess.getstatusoutput( aCommand )
        tmplistoffiles = tmpstatus[1].split('\n')
        if len(tmplistoffiles) == 1:
            if tmplistoffiles[0] == '':
                exit('ERROR: No files found in directory ' + path)
            if tmplistoffiles[0].find('No such file or directory') != -1:
                exit("ERROR: File or directory " + path + " doesn't exist")
    else:
        tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
        path = path[0:path.rfind('/')+1]

    archiveName = os.getcwd() + '/'
    archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
    if path[:len(path)-1].rfind('/') != -1:
        archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]

    workflowdirArchive = workflowdirArchive + archiveName
    if tagType != '':
        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
    elif option.newarchive:
        for n in range(1, 100000):
            tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
            if not os.path.isdir(tryDir):
                workflowdirArchive = tryDir
                os.mkdir(workflowdirArchive)
                break
            elif n == 100000 - 1:
                exit('ERROR: Unbelievable! Do you ever clean ' + workflowdir + '? I think you have to remove some directories!')

    for ifile in tmplistoffiles:
        if ifile.find('.txt') != -1:
            if os.path.isfile(workflowdirArchive + "/" + ifile):
                if option.overwrite:
                    print("File " + ifile + " already exists in destination. We will overwrite it.")
                else:
                    print("File " + ifile + " already exists in destination. Keep original file.")
                    listoffiles.append( workflowdirArchive + ifile )
                    continue
            listoffiles.append( workflowdirArchive + ifile )
            # copy the file into the archive directory
            aCommand = cpCommand + 'cp ' + path + ifile + " " + workflowdirArchive
            print(" >> " + aCommand)
            tmpstatus = subprocess.getstatusoutput( aCommand )

    return listoffiles
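# Typical use (a sketch; the input directory name is hypothetical):
#
#   listoffiles = copyToWorkflowdir('BeamFitResults/')
#
# which archives the .txt fit results and returns their new paths.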
def mkWorkflowdir():
    # create the workflow area, wiping any stale temporary files
    global workflowdirLastPayloads
    global workflowdirTmp
    global workflowdirArchive
    if not os.path.isdir(workflowdir):
        print("Making " + workflowdir + " directory...")
        os.mkdir(workflowdir)

    if not os.path.isdir(workflowdirLastPayloads):
        os.mkdir(workflowdirLastPayloads)
    else:
        os.system("rm -f " + workflowdirLastPayloads + "*")

    if not os.path.isdir(workflowdirTmp):
        os.mkdir(workflowdirTmp)
    else:
        os.system("rm -f " + workflowdirTmp + "*")

    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
if __name__ == '__main__':

    # parse the options described in the module docstring
    # (parse is assumed to come from CommonMethods)
    option, args = parse(__doc__)
    if not args and not option:
        exit()

    workflowdir = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp = workflowdir + "tmp/"
    workflowdirArchive = workflowdir + "archive/"

    if (option.data and option.tag) or (option.data and option.copy):
        mkWorkflowdir()

    if not option.data:
        print("ERROR: You must provide the data file or a directory with data files")
        exit()

    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied in " + workflowdirArchive)
    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1:
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print("I am assuming your tag is for the offline database...")
            tagType = "offline"
    else:
        print("ERROR: You must provide the database tag name")
        exit()
    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print("\n\n unknown iov base option: " + option.IOVbase + " \n\n\n")
            exit()
        IOVbase = option.IOVbase
    # get the list of data files and index them chronologically
    listoffiles = copyToWorkflowdir(option.data)
    sortedlist = {}

    for beam_file in listoffiles:

        if len(listoffiles) == 1 and option.merged:
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            # a merged file is a sequence of fixed-size payload blocks,
            # 23 lines per fit result
            npayloads = int(len(alllines) / 23)
            for i in range(0, npayloads):
                block = alllines[i * 23: (i+1) * 23]
                arun = ''
                atime = ''
                alumi = ''
                isok = True
                for line in block:
                    if line.find('Runnumber') != -1:
                        arun = line.split()[1]
                    if line.find("EndTimeOfFit") != -1:
                        atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3], "%Y.%m.%d %H:%M:%S %Z")
                    if line.find("LumiRange") != -1:
                        alumi = line.split()[3]
                    if line.find('Type') != -1 and line.split()[1] != '2':
                        isok = False   # keep only blocks with a good fit (Type 2)
                if not isok:
                    continue
                # assumption: blocks are keyed by packed (run, lumi) so that
                # sorting the keys gives chronological order
                sortedlist[int(pack(int(arun), int(alumi)))] = block
            continue

        tmpfile = open(beam_file)
        arun = ''
        atime = ''
        alumi = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3], "%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumi = line.split()[3]
            if line.find('Type') != -1 and line.split()[1] == '0':
                skip = True   # Type 0 means the fit failed
        if skip:
            print(" zero fit result, skip file " + beam_file + " with time stamp:")
            print(" run " + arun + " lumis " + alumi)
        else:
            sortedlist[int(pack(int(arun), int(alumi)))] = beam_file
        tmpfile.close()
    keys = sorted(sortedlist.keys())

    # write the combined data file
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"
    allfile = open( allbeam_file, 'a')
    print(" merging all results into file: " + allbeam_file)

    # remove any stale merged sqlite file
    if os.path.exists(workflowdirArchive + "payloads/Combined.db"):
        os.system("rm " + workflowdirArchive + "payloads/Combined.db")

    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'

    # CMSSW configuration templates for writing and reading the payload
    writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
    readdb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
    # assumption: lines elided in the source build a file-name suffix here;
    # default to an empty string
    suffix = ''
    sqlite_file_name = tagname + suffix
    sqlite_file = workflowdirTmp + sqlite_file_name + '.db'
    metadata_file = workflowdirTmp + sqlite_file_name + '.txt'

    for key in keys:

        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp + "tmp_datafile.txt"
        if option.merged:
            # a merged input stores the payload block itself in sortedlist;
            # dump it into a temporary data file for the DB writer
            tmpfile = open(tmp_datafilename, 'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename
        print("read input beamspot file: " + beam_file)
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp, "w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                # optionally enlarge sigmaZ to 10 +/- 0.005 cm (the -z option);
                # Cov(3,3) = 0.005**2 = 2.5e-05
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            # assumption: lines elided in the source also collect every line
            # into the merged "all IOVs" file here
            allfile.write(line)
        # compute the IOV boundaries for this payload
        if IOVbase == "lumibase":
            # encode run and lumi section into a single 64-bit IOV number
            iov_since = str(pack(int(tmp_run), int(tmp_lumi_since)))
            iov_till = str(pack(int(tmp_run), int(tmp_lumi_till)))
        if nfile == 0:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        # from here on, work with the filtered temporary copy
        beam_file = beam_file_tmp
        # create the DB payload for this IOV
        if not writeSqliteFile(sqlite_file, tagname, timetype, beam_file, writedb_template, workflowdirTmp):
            print("An error occurred while writing the sqlite file: " + sqlite_file)

        subprocess.getstatusoutput('rm -f ' + beam_file)
        # read the payload back as a sanity check
        readSqliteFile(sqlite_file, tagname, readdb_template, workflowdirTmp)

        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print(" merge sqlite file ...")
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till, workflowdirTmp)

        nfile = nfile + 1
        if nfile == total_files:
            print(" this is the last IOV. You can use this payload for express and prompt conditions.")
            os.system("cp " + sqlite_file + " " + workflowdirArchive + "payloads/express.db")
            print("a copy of this payload has been placed at:")
            print(workflowdirArchive + "payloads/express.db")

        # clean up
        os.system("rm " + sqlite_file)
        print(" clean up done.")

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    # create the MERGED payload card for the dropbox
    print(" create MERGED payload card for dropbox ...")

    sqlite_file = workflowdirArchive + 'payloads/Combined.db'
    metadata_file = workflowdirArchive + 'payloads/Combined.txt'
    dfile = open(metadata_file, 'w')

    dfile.write('destDB ' + destDB + '\n')
    dfile.write('tag ' + tagname + '\n')
    dfile.write('inputtag' + '\n')
    dfile.write('since ' + iov_since_first + '\n')
    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    # assumption: lines elided in the source derive the validation type from
    # the tag type
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment + '\n')

    dfile.close()
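    # For illustration, the metadata card written above comes out roughly as
    # (tag, since and IOVCheck values here are hypothetical):
    #
    #   destDB oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT
    #   tag BeamSpotObjects_SomeTag_offline
    #   inputtag
    #   since 1
    #   Timetype lumiid
    #   IOVCheck offline
    #   usertext Beam spot position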
    # give the payload a unique name before staging it for upload
    uuid = subprocess.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    subprocess.getstatusoutput('cp ' + sqlite_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    subprocess.getstatusoutput('mv ' + sqlite_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    subprocess.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print(workflowdirLastPayloads + final_sqlite_file_name + '.db')
    print(workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    if option.upload:
        print(" scp files to offline Drop Box")
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"
        # assumption: the elided final step hands the staged files to the
        # CommonMethods uploadSqliteFile helper
        uploadSqliteFile(workflowdirLastPayloads, final_sqlite_file_name, dropbox)
# Signatures of the CommonMethods helpers used above, kept here for reference:
#
#   appendSqliteFile(combinedSqliteFileName, sqliteFileName, tagName, IOVSince, IOVTill, tmpDir="/tmp/")
#   readSqliteFile(sqliteFileName, tagName, sqliteTemplateFile, tmpDir="/tmp/")
#   writeSqliteFile(sqliteFileName, tagName, timeType, beamSpotFile, sqliteTemplateFile, tmpDir="/tmp/")
#   uploadSqliteFile(sqliteFileDirName, sqliteFileName, dropbox="/DropBox")
#   split(sequence, size)