CMS 3D CMS Logo

createPayload.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #____________________________________________________________
3 #
4 # createPayload
5 #
6 # A very simple way to create condition DB payloads
7 #
8 # Francisco Yumiceva
9 # yumiceva@fnal.gov
10 #
11 # Fermilab, 2009
12 #
13 #____________________________________________________________
14 
15 """
16  createPayload.py
17 
18  A very simple script to handle payload for beam spot results
19 
20  usage: %prog -d <data file/directory> -t <tag name>
21  -c, --copy : Only copy files from input directory to test/workflow/files/
22  -d, --data = DATA: Data file, or directory with data files.
23  -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
24  -o, --overwrite : Overwrite results files when copying.
25  -O, --Output = OUTPUT: Output directory for data files (workflow directory)
26  -m, --merged : Use when data file contains combined results.
27  -n, --newarchive : Create a new archive directory when copying.
28  -t, --tag = TAG: Database tag name.
29  -T, --Test : Upload files to Test dropbox for data validation.
30  -u, --upload : Upload files to offline drop box via scp.
31  -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.
32 
33  Francisco Yumiceva (yumiceva@fnal.gov)
34  Fermilab 2010
35 
36 """
37 from __future__ import print_function
38 
39 
40 from builtins import range
41 import sys,os
42 import commands, re, time
43 import datetime
44 from CommonMethods import *
45 
# Layout of the workflow area used to stage data files and payloads.
# These defaults are rebound in __main__ from $CMSSW_BASE (or -O/--Output).
workflowdir = 'test/workflow/'
workflowdirLastPayloads = workflowdir + 'lastPayloads/'  # last payload/metadata pair written
workflowdirTmp = workflowdir + 'tmp/'  # scratch area for sqlite and metadata files
workflowdirArchive = workflowdir + 'archive/'  # staged copies of the input data files
optionstring = ''  # NOTE(review): appears unused in this file
tagType = ''  # offline / prompt / express / hlt; derived from the tag name in __main__
52 
def copyToWorkflowdir(path):
    """Stage beam spot fit result (.txt) files into the archive directory.

    ``path`` may be a single .txt file, a directory containing .txt files,
    or a castor location (handled with the nsls/rfcp remote tools).
    Rebinds the module-level ``workflowdirArchive`` to a sub-directory named
    after the input path (suffixed with ``tagType`` when set), copies the
    files there, and returns the list of destination file paths.
    Relies on the module-level ``option`` object created in __main__.
    """
    global workflowdirArchive
    lsCommand = ''
    cpCommand = ''
    listoffiles = []
    tmplistoffiles = []
    # Castor paths need the remote-file tools: 'nsls' to list, 'rfcp' to copy.
    if path.find('castor') != -1:
        print("Getting files from castor ...")
        lsCommand = 'ns'
        cpCommand = 'rf'
    elif not os.path.exists(path):
        exit("ERROR: File or directory " + path + " doesn't exist")

    if path[len(path)-4:len(path)] != '.txt':
        # Directory input: list every .txt file it contains.
        if path[len(path)-1] != '/':
            path = path + '/'

        aCommand = lsCommand + 'ls '+ path + " | grep .txt"

        tmpstatus = commands.getstatusoutput( aCommand )
        tmplistoffiles = tmpstatus[1].split('\n')
        if len(tmplistoffiles) == 1:
            if tmplistoffiles[0] == '':
                exit('ERROR: No files found in directory ' + path)
            if tmplistoffiles[0].find('No such file or directory') != -1:
                exit("ERROR: File or directory " + path + " doesn't exist")

    else:
        # Single .txt file: split into the file name and its parent directory.
        tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
        path = path[0:path.rfind('/')+1]


    # Archive sub-directory name = last component of the input path.
    archiveName = path
    if path == './':
        archiveName = os.getcwd() + '/'
        archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
    if path[:len(path)-1].rfind('/') != -1:
        archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]

    workflowdirArchive = workflowdirArchive + archiveName
    if tagType != '' :
        # Append the tag type (offline/prompt/express/hlt) to the directory name.
        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
    elif(option.newarchive):
        # -n/--newarchive: pick the first free numbered variant of the archive
        # directory instead of reusing the existing one.
#        tmpTime = str(datetime.datetime.now())
#        tmpTime = tmpTime.replace(' ','-')
#        tmpTime = tmpTime.replace('.','-')
#        workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tmpTime + '/'
#        os.mkdir(workflowdirArchive)
        for n in range(1,100000):
            tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
            if not os.path.isdir(tryDir):
                workflowdirArchive = tryDir
                os.mkdir(workflowdirArchive)
                break
            elif n == 100000-1:
                exit('ERROR: Unbelievable! do you ever clean ' + workflowdir + '?. I think you have to remove some directories!')

    for ifile in tmplistoffiles:
        if ifile.find('.txt') != -1:
            if os.path.isfile(workflowdirArchive+"/"+ifile):
                if option.overwrite:
                    print("File " + ifile + " already exists in destination. We will overwrite it.")
                else:
                    # Without -o/--overwrite keep the existing copy but still
                    # report it as part of the result list.
                    print("File " + ifile + " already exists in destination. Keep original file.")
                    listoffiles.append( workflowdirArchive + ifile )
                    continue
            listoffiles.append( workflowdirArchive + ifile )
            # copy to local disk
            aCommand = cpCommand + 'cp '+ path + ifile + " " + workflowdirArchive
            print(" >> " + aCommand)
            tmpstatus = commands.getstatusoutput( aCommand )
    return listoffiles
127 
def mkWorkflowdir():
    """Create the workflow directory tree (workflow/, lastPayloads/, tmp/,
    archive/), emptying the two scratch areas when they already exist."""
    global workflowdir
    global workflowdirLastPayloads
    global workflowdirTmp
    global workflowdirArchive

    if not os.path.isdir(workflowdir):
        print("Making " + workflowdir + " directory...")
        os.mkdir(workflowdir)

    # Scratch areas: create when missing, otherwise wipe their contents.
    for scratch in (workflowdirLastPayloads, workflowdirTmp):
        if os.path.isdir(scratch):
            os.system("rm -f "+ scratch + "*")
        else:
            os.mkdir(scratch)

    if not os.path.isdir(workflowdirArchive):
        os.mkdir(workflowdirArchive)
150 
if __name__ == '__main__':

    #if len(sys.argv) < 2:
    #    print "\n [usage] createPayload <beamspot file> <tag name> <IOV since> <IOV till=-1=inf> <IOV comment> <destDB=oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT>"
    #    print " e.g. createPayload BeamFitResults_template.txt BeamSpotObjects_2009_v1_express 122745 \"\" \"beam spot for early collisions\" \"oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT\"\n"
    #    sys.exit()


    # COMMAND LINE OPTIONS
    # parse() (from CommonMethods) builds the option object from the usage
    # text in this module's docstring.
    option,args = parse(__doc__)
    if not args and not option: exit()

    # Workflow area: default under the CMSSW release area, overridable with -O.
    # These assignments run at module scope, so they rebind the globals used
    # by mkWorkflowdir() and copyToWorkflowdir().
    workflowdir = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp = workflowdir + "tmp/"
    workflowdirArchive = workflowdir + "archive/"

    if ( (option.data and option.tag) or (option.data and option.copy)):
        mkWorkflowdir()

    if not option.data:
        print("ERROR: You must provide the data file or the a directory with data files")
        exit()

    # -c/--copy: only stage the input files into the archive, no payloads.
    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied in " + workflowdirArchive)

    # Derive the tag type (offline/prompt/express/hlt) from the tag name; it
    # is appended to the archive directory name by copyToWorkflowdir().
    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1 :
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print("I am assuming your tag is for the offline database...")
            tagType = "offline"

    else:
        print("ERROR: You must provide the database tag name")
        exit()

    # IOV granularity: runbase (default), lumibase, or timebase.
    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print("\n\n unknown iov base option: "+ option.IOVbase +" \n\n\n")
            exit()
        IOVbase = option.IOVbase

    listoffiles = copyToWorkflowdir(option.data)
    # sort list of data files in chronological order
    sortedlist = {}

    for beam_file in listoffiles:

        if len(listoffiles)==1 and option.merged:
            # -m/--merged with a single combined file: split it into
            # fixed-size 23-line payload blocks, keyed by the value in the
            # first line of each block.
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            # Python 2 integer division; assumes each payload is exactly 23
            # lines (under Python 3 this yields a float and range() breaks).
            npayloads = len(alllines)/23
            for i in range(0,npayloads):
                block = alllines[i * 23: (i+1)*23]
                #line = block[2]
                #atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
                line = block[0]
                atime = line.split()[1]
                sortedlist[atime] = block
            break

        # One fit result per file: key the file by the packed (run, lumi).
        tmpfile = open(beam_file)
        atime = ''
        arun = ''
        alumis = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] + " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumi = line.split()[3]
            # 'Type 0' marks a failed/zero fit; skip the whole file.
            if line.find('Type') != -1 and line.split()[1] == '0':
                skip = True
        if skip:
            print(" zero fit result, skip file " + beam_file + " with time stamp:")
            # NOTE(review): 'alumis' is never assigned after its '' init (the
            # parsed value goes to 'alumi' above), so this prints an empty
            # lumi string — looks like a latent bug; confirm before fixing.
            print(" run " + arun + " lumis " + alumis)
        else:
            # NOTE(review): 'alumi' is unbound if the file had no LumiRange
            # line — this would raise NameError here.
            sortedlist[int(pack(int(arun), int(alumi)))] = beam_file

        tmpfile.close()

    keys = sorted(sortedlist.keys())

    # write combined data file
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"
#    if os.path.isfile(allbeam_file):

    # Opened in append mode: results accumulate across invocations.
    allfile = open( allbeam_file, 'a')
    print(" merging all results into file: " + allbeam_file)

    # check if merged sqlite file exists
    if os.path.exists(workflowdirArchive+"payloads/Combined.db"):
        os.system("rm "+workflowdirArchive+"payloads/Combined.db")


    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    # Destination database: production by default, prep when -T/--Test.
    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'
    # One sqlite payload per IOV, appended into Combined.db as we go.
    for key in keys:

        iov_since = '1'
        iov_till = ''

        suffix = "_" + str(nfile)
        writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
        readdb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
        sqlite_file_name = tagname + suffix
        sqlite_file = workflowdirTmp + sqlite_file_name + '.db'
        metadata_file = workflowdirTmp + sqlite_file_name + '.txt'
        nfile = nfile + 1



        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp+"tmp_datafile.txt"
        if option.merged:
            # In merged mode the dict values are blocks of lines, not file
            # names: dump the block to a scratch file first.
            # NOTE(review): file() is the Python 2 builtin (open() in Py3).
            tmpfile = file(tmp_datafilename,'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename

        print("read input beamspot file: " + beam_file)
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp,"w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            # Extract run and lumi range for the IOV; the time/lumi
            # bookkeeping lines are stripped from the payload copy, but every
            # line still goes into the all-IOVs merge file below.
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                # -z/--zlarge: enlarge sigmaZ to 10 +/- 0.005 cm.
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            allfile.write(line)

        # pack run number and lumi section
        if IOVbase == "lumibase":
            timetype = "lumiid"
            iov_since = str( pack(int(tmp_run), int(tmp_lumi_since)) )
            iov_till = str( pack(int(tmp_run), int(tmp_lumi_till)) )
        # keep first iov for merged output metafile
        if nfile == 1:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        if option.copy:
            continue

        beam_file = beam_file_tmp

        if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
            print("An error occurred while writing the sqlite file: " + sqlite_file)

        commands.getstatusoutput('rm -f ' + beam_file)

        # Read the payload back as a sanity check.
        readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)


        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print(" merge sqlite file ...")
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till ,workflowdirTmp)

        # keep last payload for express, and prompt tags
        if nfile == total_files:
            print(" this is the last IOV. You can use this payload for express and prompt conditions.")
            os.system("cp "+sqlite_file+ " "+workflowdirArchive+"payloads/express.db")
            print("a copy of this payload has been placed at:")
            print(workflowdirArchive+"payloads/express.db")

        # clean up
        os.system("rm "+ sqlite_file)
        print(" clean up done.")

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    allfile.close()



    print(" create MERGED payload card for dropbox ...")

    # Metadata card accompanying the combined sqlite payload.
    sqlite_file = workflowdirArchive+'payloads/Combined.db'
    metadata_file = workflowdirArchive+'payloads/Combined.txt'
    dfile = open(metadata_file,'w')

    dfile.write('destDB '+ destDB +'\n')
    dfile.write('tag '+ tagname +'\n')
    dfile.write('inputtag' +'\n')
    dfile.write('since ' + iov_since_first +'\n')
    #        dfile.write('till ' + iov_till +'\n')
    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    # Express payloads are validated against the hlt tag in the dropbox.
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment +'\n')

    dfile.close()

    # Unique payload name for the dropbox: <tag>@<uuid>.
    uuid = commands.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    commands.getstatusoutput('cp ' + sqlite_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    commands.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    commands.getstatusoutput('mv ' + sqlite_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    commands.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print(workflowdirLastPayloads + final_sqlite_file_name + '.db')
    print(workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    # -u/--upload: ship the payload pair to the offline (or test) dropbox.
    if option.upload:
        print(" scp files to offline Drop Box")
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"

        uploadSqliteFile(workflowdirLastPayloads,final_sqlite_file_name,dropbox)
FastTimerService_cff.range
range
Definition: FastTimerService_cff.py:34
createPayload.copyToWorkflowdir
def copyToWorkflowdir(path)
Definition: createPayload.py:53
spr::find
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:19
CommonMethods.uploadSqliteFile
def uploadSqliteFile(sqliteFileDirName, sqliteFileName, dropbox="/DropBox")
Definition: CommonMethods.py:939
submitPVValidationJobs.split
def split(sequence, size)
Definition: submitPVValidationJobs.py:352
str
#define str(s)
Definition: TestProcessor.cc:52
CommonMethods.readSqliteFile
def readSqliteFile(sqliteFileName, tagName, sqliteTemplateFile, tmpDir="/tmp/")
Definition: CommonMethods.py:908
dumpparser.parse
def parse(path, config)
Definition: dumpparser.py:13
print
void print(TMatrixD &m, const char *label=nullptr, bool mathematicaFormat=false)
Definition: Utilities.cc:46
createfilelist.int
int
Definition: createfilelist.py:10
FrontierConditions_GlobalTag_cff.file
file
Definition: FrontierConditions_GlobalTag_cff.py:13
timeUnitHelper.pack
def pack(high, low)
Definition: timeUnitHelper.py:3
createPayload.mkWorkflowdir
def mkWorkflowdir()
Definition: createPayload.py:128
CommonMethods.writeSqliteFile
def writeSqliteFile(sqliteFileName, tagName, timeType, beamSpotFile, sqliteTemplateFile, tmpDir="/tmp/")
Definition: CommonMethods.py:884
CommonMethods.appendSqliteFile
def appendSqliteFile(combinedSqliteFileName, sqliteFileName, tagName, IOVSince, IOVTill, tmpDir="/tmp/")
Definition: CommonMethods.py:931
beamvalidation.exit
def exit(msg="")
Definition: beamvalidation.py:53