A very simple script to handle payloads for beam spot results
20 usage: %prog -d <data file/directory> -t <tag name>
21 -c, --copy : Only copy files from input directory to test/workflow/files/
22 -d, --data = DATA: Data file, or directory with data files.
23 -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
24 -o, --overwrite : Overwrite results files when copying.
25 -O, --Output = OUTPUT: Output directory for data files (workflow directory)
26 -m, --merged : Use when data file contains combined results.
27 -n, --newarchive : Create a new archive directory when copying.
28 -t, --tag = TAG: Database tag name.
29 -T, --Test : Upload files to Test dropbox for data validation.
30 -u, --upload : Upload files to offline drop box via scp.
31 -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.
33 Francisco Yumiceva (yumiceva@fnal.gov)
37 from __future__
import print_function
40 from builtins
import range
42 import subprocess, re, time
44 from CommonMethods
import *
# Layout of the local beam spot payload workflow. All paths are relative to
# the current working directory and keep a trailing slash so file names can
# be appended by plain concatenation.
workflowdir = 'test/workflow/'
# Most recently produced payload/metadata files end up here.
workflowdirLastPayloads = workflowdir + 'lastPayloads/'
# Scratch area for intermediate files (tmp sqlite/metadata).
workflowdirTmp = workflowdir + 'tmp/'
# Archive of copied beam spot result files.
workflowdirArchive = workflowdir + 'archive/'
# --- copyToWorkflowdir(path): stage beam spot result .txt files into the archive ---
# NOTE(review): garbled extraction — the enclosing "def" line and several
# original lines are not visible here (castor ls/cp command-prefix setup,
# "else:" branches, loop "break", final "return listoffiles"). Confirm the
# full control flow against the complete file before relying on it.
54 global workflowdirArchive
# Castor paths get special list/copy command prefixes (set in elided lines);
# a plain local path must exist on disk.
59 if path.find(
'castor') != -1:
60 print(
"Getting files from castor ...")
63 elif not os.path.exists(path):
64 exit(
"ERROR: File or directory " + path +
" doesn't exist")
# Path does not end in ".txt": treat it as a directory and list the .txt
# files inside it (ensuring a trailing slash first).
66 if path[len(path)-4:len(path)] !=
'.txt':
67 if path[len(path)-1] !=
'/':
70 aCommand = lsCommand +
'ls '+ path +
" | grep .txt"
72 tmpstatus = subprocess.getstatusoutput( aCommand )
73 tmplistoffiles = tmpstatus[1].
split(
'\n')
# A single-entry listing is either "nothing found" or an ls error message.
74 if len(tmplistoffiles) == 1:
75 if tmplistoffiles[0] ==
'':
76 exit(
'ERROR: No files found in directory ' + path)
77 if tmplistoffiles[0].
find(
'No such file or directory') != -1:
78 exit(
"ERROR: File or directory " + path +
" doesn't exist")
# Otherwise (presumably the elided "else:" branch) path names a single .txt
# file: split it into directory part and bare file name.
81 tmplistoffiles.append(path[path.rfind(
'/')+1:len(path)])
82 path = path[0:path.rfind(
'/')+1]
# Derive the archive subdirectory name: default to the current working
# directory's last component, or the input path's last component if present.
87 archiveName = os.getcwd() +
'/'
88 archiveName = archiveName[archiveName[:len(archiveName)-1].rfind(
'/')+1:len(archiveName)]
89 if path[:len(path)-1].rfind(
'/') != -1:
90 archiveName = path[path[:len(path)-1].rfind(
'/')+1:len(path)]
92 workflowdirArchive = workflowdirArchive + archiveName
# Append the database tag type (offline/prompt/express/hlt) to the archive
# directory name.
94 workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] +
'_' + tagType +
'/'
95 if not os.path.isdir(workflowdirArchive):
96 os.mkdir(workflowdirArchive)
# With -n/--newarchive, probe numbered suffixes until an unused directory is
# found (the loop "break" after mkdir is in elided lines — confirm).
97 elif(option.newarchive):
103 for n
in range(1,100000):
104 tryDir = workflowdirArchive[:len(workflowdirArchive)-1] +
'_' +
str(n) +
'/'
105 if not os.path.isdir(tryDir):
106 workflowdirArchive = tryDir
107 os.mkdir(workflowdirArchive)
# Reached only when every numbered directory already exists (guard elided).
110 exit(
'ERROR: Unbelievable! do you ever clean ' + workflowdir +
'?. I think you have to remove some directories!')
# Copy each .txt result file into the archive. The overwrite-vs-keep choice
# presumably hangs off an elided "if option.overwrite:" guard — confirm.
112 for ifile
in tmplistoffiles:
113 if ifile.find(
'.txt') != -1:
114 if os.path.isfile(workflowdirArchive+
"/"+ifile):
116 print(
"File " + ifile +
" already exists in destination. We will overwrite it.")
118 print(
"File " + ifile +
" already exists in destination. Keep original file.")
119 listoffiles.append( workflowdirArchive + ifile )
121 listoffiles.append( workflowdirArchive + ifile )
# Shell out to (rf)cp to place the file in the archive directory.
123 aCommand = cpCommand +
'cp '+ path + ifile +
" " + workflowdirArchive
124 print(
" >> " + aCommand)
125 tmpstatus = subprocess.getstatusoutput( aCommand )
# --- mkWorkflowdir(): create the workflow directory tree ---
# Creates workflowdir and its lastPayloads/, tmp/ and archive/ children.
# NOTE(review): the "os.system('rm -f ...')" calls presumably sit under
# elided "else:" branches (original lines 139/144), i.e. pre-existing
# lastPayloads/ and tmp/ directories are emptied rather than recreated —
# confirm against the full file.
130 global workflowdirLastPayloads
131 global workflowdirTmp
132 global workflowdirArchive
133 if not os.path.isdir(workflowdir):
134 print(
"Making " + workflowdir +
" directory...")
135 os.mkdir(workflowdir)
137 if not os.path.isdir(workflowdirLastPayloads):
138 os.mkdir(workflowdirLastPayloads)
# Wipe stale payload files from a previous run.
140 os.system(
"rm -f "+ workflowdirLastPayloads +
"*")
142 if not os.path.isdir(workflowdirTmp):
143 os.mkdir(workflowdirTmp)
# Wipe stale temporary files from a previous run.
145 os.system(
"rm -f "+ workflowdirTmp +
"*")
147 if not os.path.isdir(workflowdirArchive):
148 os.mkdir(workflowdirArchive)
# --- Script entry point: parse options, resolve the workflow directories ---
151 if __name__ ==
'__main__':
# Parse command-line options out of the module docstring (parse comes from
# the star-import of CommonMethods — confirm).
160 option,args =
parse(__doc__)
161 if not args
and not option:
exit()
# Default workflow root lives inside the CMSSW release area.
163 workflowdir = os.getenv(
"CMSSW_BASE") +
"/src/RecoVertex/BeamSpotProducer/test/workflow/"
# -O/--Output overrides the default (the "if option.Output:" guard is in
# elided line 164 — confirm); ensure a trailing slash.
165 workflowdir = option.Output
166 if workflowdir[len(workflowdir)-1] !=
'/':
167 workflowdir = workflowdir +
'/'
# Re-derive the sub-directories from the chosen workflow root.
168 workflowdirLastPayloads = workflowdir +
"lastPayloads/"
169 workflowdirTmp = workflowdir +
"tmp/"
170 workflowdirArchive = workflowdir +
"archive/"
# Create the workflow directory tree only when we will actually use it:
# data+tag (full payload run) or data+copy (copy-only run).
172 if ( (option.data
and option.tag)
or (option.data
and option.copy)):
# -d/--data is mandatory (the "if not option.data:" guard is elided).
176 print(
"ERROR: You must provide the data file or the a directory with data files")
# Copy-only mode: stage the files and stop (copy call is in elided lines).
181 exit(
"Files copied in " + workflowdirArchive)
# Classify the tag name into a tag type; the "tagType = ..." assignments in
# each branch are in elided lines — confirm against the full file.
186 if tagname.find(
"offline") != -1:
188 elif tagname.find(
"prompt") != -1:
190 elif tagname.find(
"express") != -1 :
192 elif tagname.find(
"hlt") != -1:
# Unrecognized tag names fall back to the offline database.
195 print(
"I am assuming your tag is for the offline database...")
# -t/--tag is mandatory (guard elided).
199 print(
"ERROR: You must provide the database tag name")
# IOV defaults: run-number based IOVs.
203 timetype =
'runnumber'
# -I/--IOVbase must be one of the three supported bases.
205 if option.IOVbase !=
"runbase" and option.IOVbase !=
"lumibase" and option.IOVbase !=
"timebase":
206 print(
"\n\n unknown iov base option: "+ option.IOVbase +
" \n\n\n")
208 IOVbase = option.IOVbase
# Sort the input files into IOV order, keyed by packed (run, lumi).
214 for beam_file
in listoffiles:
# -m/--merged with a single file: the file holds many concatenated results,
# 23 lines per payload block.
216 if len(listoffiles)==1
and option.merged:
217 mergedfile = open(beam_file)
218 alllines = mergedfile.readlines()
# NOTE(review): "/" yields a float in Python 3, which then breaks
# range(0,npayloads) below — should presumably be len(alllines)//23.
219 npayloads = len(alllines)/23
220 for i
in range(0,npayloads):
221 block = alllines[i * 23: (i+1)*23]
# Key each 23-line block by a timestamp parsed from it (parsing lines elided).
225 atime = line.split()[1]
226 sortedlist[atime] = block
# Per-file case: scan each file for run, fit end time and lumi range.
229 tmpfile = open(beam_file)
235 if line.find(
'Runnumber') != -1:
236 arun = line.split()[1]
237 if line.find(
"EndTimeOfFit") != -1:
238 atime = time.strptime(line.split()[1] +
" " + line.split()[2] +
" " + line.split()[3],
"%Y.%m.%d %H:%M:%S %Z")
239 if line.find(
"LumiRange") != -1:
240 alumi = line.split()[3]
# "Type 0" marks a zero (failed) fit: skip this file.
241 if line.find(
'Type') != -1
and line.split()[1] ==
'0':
244 print(
" zero fit result, skip file " + beam_file +
" with time stamp:")
# NOTE(review): "alumis" is not defined in the visible code (only "alumi"
# above) — likely a NameError when this branch runs; confirm.
245 print(
" run " + arun +
" lumis " + alumis)
# Key by the packed (run, lumi) integer (pack comes from CommonMethods).
247 sortedlist[int(
pack(int(arun), int(alumi)))] = beam_file
251 keys = sorted(sortedlist.keys())
# Append every result to a single per-tag "all IOVs" text file.
254 if not os.path.isdir(workflowdirArchive +
"AllIOVs"):
255 os.mkdir(workflowdirArchive +
"AllIOVs")
256 allbeam_file = workflowdirArchive +
"AllIOVs/" + tagname +
"_all_IOVs.txt"
259 allfile = open( allbeam_file,
'a')
260 print(
" merging all results into file: " + allbeam_file)
# Start from a clean combined sqlite file.
263 if os.path.exists(workflowdirArchive+
"payloads/Combined.db"):
264 os.system(
"rm "+workflowdirArchive+
"payloads/Combined.db")
# Bookkeeping for the IOV loop: remember the first IOV for the dropbox card.
268 iov_since_first =
'1'
269 total_files = len(keys)
# Production conditions DB by default...
271 destDB =
'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
# ...overridden to the preparation DB (presumably under an elided
# "if option.Test:" guard — confirm).
273 destDB =
'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'
275 iov_comment =
'Beam spot position'
# cmsRun templates used to write/read the sqlite payload.
282 writedb_template = os.getenv(
"CMSSW_BASE") +
"/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
283 readdb_template = os.getenv(
"CMSSW_BASE") +
"/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
# Per-IOV file names in the tmp area ("suffix" is set in elided lines).
284 sqlite_file_name = tagname + suffix
285 sqlite_file = workflowdirTmp + sqlite_file_name +
'.db'
286 metadata_file = workflowdirTmp + sqlite_file_name +
'.txt'
# Per-IOV processing (loop header over "keys" is in elided lines).
291 beam_file = sortedlist[key]
# Merged mode: the dict value is a block of lines, not a file name — dump it
# to a scratch file first.
292 tmp_datafilename = workflowdirTmp+
"tmp_datafile.txt"
# NOTE(review): the "file" builtin was removed in Python 3 — this should be
# open(tmp_datafilename, 'w'); it will raise NameError under py3.
294 tmpfile =
file(tmp_datafilename,
'w')
295 tmpfile.writelines(sortedlist[key])
297 beam_file = tmp_datafilename
299 print(
"read input beamspot file: " + beam_file)
# Rewrite the beam spot file into a .tmp copy, extracting IOV info and
# optionally enlarging sigmaZ on the way.
300 tmpfile = open(beam_file)
301 beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind(
'/')+1:] +
".tmp"
302 newtmpfile = open(beam_file_tmp,
"w")
307 if line.find(
"Runnumber") != -1:
308 iov_since = line.split()[1]
310 tmp_run = line.split()[1]
311 elif line.find(
"LumiRange") != -1:
312 tmp_lumi_since = line.split()[1]
313 tmp_lumi_till = line.split()[3]
# Copy every other line through, dropping fit-time/lumi bookkeeping lines.
314 elif line.find(
"BeginTimeOfFit") == -1
and line.find(
"EndTimeOfFit") == -1
and line.find(
"LumiRange") == -1:
# -z/--zlarge: force sigmaZ to 10 cm with a small fixed variance.
315 if line.find(
"sigmaZ0") != -1
and option.zlarge:
316 line =
"sigmaZ0 10\n"
317 if line.find(
"Cov(3,j)") != -1
and option.zlarge:
318 line =
"Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
319 newtmpfile.write(line)
# Lumi-based IOVs are packed (run, lumi) integers.
323 if IOVbase ==
"lumibase":
325 iov_since =
str(
pack(int(tmp_run), int(tmp_lumi_since)) )
326 iov_till =
str(
pack(int(tmp_run), int(tmp_lumi_till)) )
# Remember the very first IOV for the merged dropbox card (guard elided).
329 iov_since_first = iov_since
# From here on operate on the rewritten copy.
336 beam_file = beam_file_tmp
# Write this IOV's payload into a sqlite file via the cmsRun template
# (writeSqliteFile/readSqliteFile/appendSqliteFile come from CommonMethods).
338 if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
339 print(
"An error occurred while writing the sqlite file: " + sqlite_file)
# Remove the rewritten .tmp input file.
341 subprocess.getstatusoutput(
'rm -f ' + beam_file)
# Read the payload back as a sanity check.
343 readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)
346 if not os.path.isdir(workflowdirArchive +
'payloads'):
347 os.mkdir(workflowdirArchive +
'payloads')
# Merge this IOV into the combined sqlite file.
349 print(
" merge sqlite file ...")
350 appendSqliteFile(
"Combined.db", sqlite_file, tagname, iov_since, iov_till ,workflowdirTmp)
# The last IOV doubles as the express/prompt payload.
353 if nfile == total_files:
354 print(
" this is the last IOV. You can use this payload for express and prompt conditions.")
355 os.system(
"cp "+sqlite_file+
" "+workflowdirArchive+
"payloads/express.db")
356 print(
"a copy of this payload has been placed at:")
357 print(workflowdirArchive+
"payloads/express.db")
# Per-IOV cleanup, then move the combined file into the archive.
360 os.system(
"rm "+ sqlite_file)
361 print(
" clean up done.")
363 os.system(
"mv " + workflowdirTmp +
"Combined.db " + workflowdirArchive +
"payloads/")
# Write the dropbox metadata card describing the merged (Combined.db) payload.
368 print(
" create MERGED payload card for dropbox ...")
370 sqlite_file = workflowdirArchive+
'payloads/Combined.db'
371 metadata_file = workflowdirArchive+
'payloads/Combined.txt'
372 dfile = open(metadata_file,
'w')
# Standard dropbox metadata key/value lines.
374 dfile.write(
'destDB '+ destDB +
'\n')
375 dfile.write(
'tag '+ tagname +
'\n')
376 dfile.write(
'inputtag' +
'\n')
377 dfile.write(
'since ' + iov_since_first +
'\n')
# Time type follows the chosen IOV base (timebase handling not visible here).
379 if IOVbase ==
"runbase":
380 dfile.write(
'Timetype runnumber\n')
381 elif IOVbase ==
"lumibase":
382 dfile.write(
'Timetype lumiid\n')
# "checkType" is assigned in elided lines (per tag type) — confirm.
384 if tagType ==
"express":
386 dfile.write(
'IOVCheck ' + checkType +
'\n')
387 dfile.write(
'usertext ' + iov_comment +
'\n')
# Give the payload a unique name (tag@uuid), archive a copy, and stage the
# files in lastPayloads/ for upload.
391 uuid = subprocess.getstatusoutput(
'uuidgen -t')[1]
392 final_sqlite_file_name = tagname +
'@' + uuid
394 if not os.path.isdir(workflowdirArchive +
'payloads'):
395 os.mkdir(workflowdirArchive +
'payloads')
# Keep archive copies of both the sqlite payload and its metadata card.
396 subprocess.getstatusoutput(
'cp ' + sqlite_file +
' ' + workflowdirArchive +
'payloads/' + final_sqlite_file_name +
'.db')
397 subprocess.getstatusoutput(
'cp ' + metadata_file +
' ' + workflowdirArchive +
'payloads/' + final_sqlite_file_name +
'.txt')
# Move the originals into lastPayloads/ — the upload source directory.
399 subprocess.getstatusoutput(
'mv ' + sqlite_file +
' ' + workflowdirLastPayloads + final_sqlite_file_name +
'.db')
400 subprocess.getstatusoutput(
'mv ' + metadata_file +
' ' + workflowdirLastPayloads + final_sqlite_file_name +
'.txt')
402 print(workflowdirLastPayloads + final_sqlite_file_name +
'.db')
403 print(workflowdirLastPayloads + final_sqlite_file_name +
'.txt')
# -u/--upload: scp to the offline dropbox; -T/--Test selects the test
# dropbox (the option guards are in elided lines — confirm).
406 print(
" scp files to offline Drop Box")
409 dropbox =
"/DropBox_test"
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
const uint16_t range(const Frame &aFrame)
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)