CMS 3D CMS Logo

createPayload.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #____________________________________________________________
3 #
4 # createPayload
5 #
6 # A very simple way to create condition DB payloads
7 #
8 # Francisco Yumiceva
9 # yumiceva@fnal.gov
10 #
11 # Fermilab, 2009
12 #
13 #____________________________________________________________
14 
15 """
16  createPayload.py
17 
18  A very simple script to handle payload for beam spot results
19 
20  usage: %prog -d <data file/directory> -t <tag name>
21  -c, --copy : Only copy files from input directory to test/workflow/files/
22  -d, --data = DATA: Data file, or directory with data files.
23  -I, --IOVbase = IOVBASE: options: runbase(default), lumibase, timebase
24  -o, --overwrite : Overwrite results files when copying.
25  -O, --Output = OUTPUT: Output directory for data files (workflow directory)
26  -m, --merged : Use when data file contains combined results.
27  -n, --newarchive : Create a new archive directory when copying.
28  -t, --tag = TAG: Database tag name.
29  -T, --Test : Upload files to Test dropbox for data validation.
30  -u, --upload : Upload files to offline drop box via scp.
31  -z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.
32 
33  Francisco Yumiceva (yumiceva@fnal.gov)
34  Fermilab 2010
35 
36 """
37 
38 
39 import sys,os
40 import commands, re, time
41 import datetime
42 from CommonMethods import *
43 
44 workflowdir = 'test/workflow/'
45 workflowdirLastPayloads = workflowdir + 'lastPayloads/'
46 workflowdirTmp = workflowdir + 'tmp/'
47 workflowdirArchive = workflowdir + 'archive/'
48 optionstring = ''
49 tagType = ''
50 
52  global workflowdirArchive
53  lsCommand = ''
54  cpCommand = ''
55  listoffiles = []
56  tmplistoffiles = []
57  if path.find('castor') != -1:
58  print "Getting files from castor ..."
59  lsCommand = 'ns'
60  cpCommand = 'rf'
61  elif not os.path.exists(path):
62  exit("ERROR: File or directory " + path + " doesn't exist")
63 
64  if path[len(path)-4:len(path)] != '.txt':
65  if path[len(path)-1] != '/':
66  path = path + '/'
67 
68  aCommand = lsCommand + 'ls '+ path + " | grep .txt"
69 
70  tmpstatus = commands.getstatusoutput( aCommand )
71  tmplistoffiles = tmpstatus[1].split('\n')
72  if len(tmplistoffiles) == 1:
73  if tmplistoffiles[0] == '':
74  exit('ERROR: No files found in directory ' + path)
75  if tmplistoffiles[0].find('No such file or directory') != -1:
76  exit("ERROR: File or directory " + path + " doesn't exist")
77 
78  else:
79  tmplistoffiles.append(path[path.rfind('/')+1:len(path)])
80  path = path[0:path.rfind('/')+1]
81 
82 
83  archiveName = path
84  if path == './':
85  archiveName = os.getcwd() + '/'
86  archiveName = archiveName[archiveName[:len(archiveName)-1].rfind('/')+1:len(archiveName)]
87  if path[:len(path)-1].rfind('/') != -1:
88  archiveName = path[path[:len(path)-1].rfind('/')+1:len(path)]
89 
90  workflowdirArchive = workflowdirArchive + archiveName
91  if tagType != '' :
92  workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tagType + '/'
93  if not os.path.isdir(workflowdirArchive):
94  os.mkdir(workflowdirArchive)
95  elif(option.newarchive):
96 # tmpTime = str(datetime.datetime.now())
97 # tmpTime = tmpTime.replace(' ','-')
98 # tmpTime = tmpTime.replace('.','-')
99 # workflowdirArchive = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + tmpTime + '/'
100 # os.mkdir(workflowdirArchive)
101  for n in range(1,100000):
102  tryDir = workflowdirArchive[:len(workflowdirArchive)-1] + '_' + str(n) + '/'
103  if not os.path.isdir(tryDir):
104  workflowdirArchive = tryDir
105  os.mkdir(workflowdirArchive)
106  break
107  elif n == 100000-1:
108  exit('ERROR: Unbelievable! do you ever clean ' + workflowdir + '?. I think you have to remove some directories!')
109 
110  for ifile in tmplistoffiles:
111  if ifile.find('.txt') != -1:
112  if os.path.isfile(workflowdirArchive+"/"+ifile):
113  if option.overwrite:
114  print "File " + ifile + " already exists in destination. We will overwrite it."
115  else:
116  print "File " + ifile + " already exists in destination. Keep original file."
117  listoffiles.append( workflowdirArchive + ifile )
118  continue
119  listoffiles.append( workflowdirArchive + ifile )
120  # copy to local disk
121  aCommand = cpCommand + 'cp '+ path + ifile + " " + workflowdirArchive
122  print " >> " + aCommand
123  tmpstatus = commands.getstatusoutput( aCommand )
124  return listoffiles
125 
127  global workflowdir
128  global workflowdirLastPayloads
129  global workflowdirTmp
130  global workflowdirArchive
131  if not os.path.isdir(workflowdir):
132  print "Making " + workflowdir + " directory..."
133  os.mkdir(workflowdir)
134 
135  if not os.path.isdir(workflowdirLastPayloads):
136  os.mkdir(workflowdirLastPayloads)
137  else:
138  os.system("rm -f "+ workflowdirLastPayloads + "*")
139 
140  if not os.path.isdir(workflowdirTmp):
141  os.mkdir(workflowdirTmp)
142  else:
143  os.system("rm -f "+ workflowdirTmp + "*")
144 
145  if not os.path.isdir(workflowdirArchive):
146  os.mkdir(workflowdirArchive)
147 
###############################################################################################3
if __name__ == '__main__':

    # COMMAND LINE OPTIONS
    #################################
    # parse() (from CommonMethods) reads the option spec out of the module
    # docstring above; option attributes mirror the long option names.
    option,args = parse(__doc__)
    if not args and not option: exit()

    # Workflow directory layout: defaults to the release area, overridden by -O.
    # NOTE(review): assumes CMSSW_BASE is set -- os.getenv returns None and the
    # concatenation would raise otherwise; confirm this is only run in a CMSSW env.
    workflowdir = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/workflow/"
    if option.Output:
        workflowdir = option.Output
        if workflowdir[len(workflowdir)-1] != '/':
            workflowdir = workflowdir + '/'
    workflowdirLastPayloads = workflowdir + "lastPayloads/"
    workflowdirTmp = workflowdir + "tmp/"
    workflowdirArchive = workflowdir + "archive/"

    # only build the directory tree when there is actually work to do
    if ( (option.data and option.tag) or (option.data and option.copy)):
        mkWorkflowdir()

    if not option.data:
        print "ERROR: You must provide the data file or the a directory with data files"
        exit()

    # -c / --copy: just stage the input files into the archive and stop
    if option.copy:
        copyToWorkflowdir(option.data)
        exit("Files copied in " + workflowdirArchive)

    # derive the tag type (used for the archive dir suffix and IOVCheck)
    # from well-known substrings of the tag name
    tagname = ''
    if option.tag:
        tagname = option.tag
        if tagname.find("offline") != -1:
            tagType = "offline"
        elif tagname.find("prompt") != -1:
            tagType = "prompt"
        elif tagname.find("express") != -1 :
            tagType = "express"
        elif tagname.find("hlt") != -1:
            tagType = "hlt"
        else:
            print "I am assuming your tag is for the offline database..."
            tagType = "offline"

    else:
        print "ERROR: You must provide the database tag name"
        exit()

    # IOV base: runnumber by default, lumiid when -I lumibase is given
    IOVbase = 'runbase'
    timetype = 'runnumber'
    if option.IOVbase:
        if option.IOVbase != "runbase" and option.IOVbase != "lumibase" and option.IOVbase != "timebase":
            print "\n\n unknown iov base option: "+ option.IOVbase +" \n\n\n"
            exit()
        IOVbase = option.IOVbase

    listoffiles = copyToWorkflowdir(option.data)
    # sort list of data files in chronological order
    sortedlist = {}

    for beam_file in listoffiles:

        # -m / --merged: a single input file holding many concatenated
        # results, 23 lines per payload record; key each record by the
        # timestamp field on its first line
        if len(listoffiles)==1 and option.merged:
            mergedfile = open(beam_file)
            alllines = mergedfile.readlines()
            npayloads = len(alllines)/23
            for i in range(0,npayloads):
                block = alllines[i * 23: (i+1)*23]
                #line = block[2]
                #atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
                line = block[0]
                atime = line.split()[1]
                sortedlist[atime] = block
            break

        # one result per file: scan it for run, lumi range and fit type
        tmpfile = open(beam_file)
        atime = ''
        arun = ''
        alumis = ''
        skip = False
        for line in tmpfile:
            if line.find('Runnumber') != -1:
                arun = line.split()[1]
            if line.find("EndTimeOfFit") != -1:
                atime = time.strptime(line.split()[1] +  " " + line.split()[2] + " " + line.split()[3],"%Y.%m.%d %H:%M:%S %Z")
            if line.find("LumiRange") != -1:
                alumi = line.split()[3]
            if line.find('Type') != -1 and line.split()[1] == '0':
                # fit type 0 marks a failed (zero) fit -- drop this file
                skip = True
        if skip:
            print " zero fit result, skip file " + beam_file + " with time stamp:"
            print " run " + arun + " lumis " + alumis
        else:
            # key by packed (run, lumi) so plain integer sort is chronological
            sortedlist[int(pack(int(arun), int(alumi)))] = beam_file

        tmpfile.close()

    keys = sorted(sortedlist.keys())

    # write combined data file
    if not os.path.isdir(workflowdirArchive + "AllIOVs"):
        os.mkdir(workflowdirArchive + "AllIOVs")
    allbeam_file = workflowdirArchive + "AllIOVs/" + tagname + "_all_IOVs.txt"
    # if os.path.isfile(allbeam_file):

    # opened in append mode: repeated runs keep extending the all-IOVs file
    allfile = open( allbeam_file, 'a')
    print " merging all results into file: " + allbeam_file

    # check if merged sqlite file exists
    if os.path.exists(workflowdirArchive+"payloads/Combined.db"):
        os.system("rm "+workflowdirArchive+"payloads/Combined.db")


    nfile = 0
    iov_since_first = '1'
    total_files = len(keys)

    # default destination is the production DB; -T redirects to the prep DB
    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    iov_comment = 'Beam spot position'
    # one sqlite payload per sorted result, appended into Combined.db
    for key in keys:

        iov_since = '1'
        iov_till = ''

        suffix = "_" + str(nfile)
        writedb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
        readdb_template = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
        sqlite_file_name = tagname + suffix
        sqlite_file = workflowdirTmp + sqlite_file_name + '.db'
        metadata_file = workflowdirTmp + sqlite_file_name + '.txt'
        nfile = nfile + 1

        #### WRITE sqlite file

        beam_file = sortedlist[key]
        tmp_datafilename = workflowdirTmp+"tmp_datafile.txt"
        if option.merged:
            # merged mode: sortedlist values are line blocks, not file
            # names -- dump the block to a scratch file first
            tmpfile = file(tmp_datafilename,'w')
            tmpfile.writelines(sortedlist[key])
            tmpfile.close()
            beam_file = tmp_datafilename

        print "read input beamspot file: " + beam_file
        tmpfile = open(beam_file)
        beam_file_tmp = workflowdirTmp + beam_file[beam_file.rfind('/')+1:] + ".tmp"
        newtmpfile = open(beam_file_tmp,"w")
        tmp_run = ""
        tmp_lumi_since = ""
        tmp_lumi_till = ""
        for line in tmpfile:
            if line.find("Runnumber") != -1:
                iov_since = line.split()[1]
                iov_till = iov_since
                tmp_run = line.split()[1]
            elif line.find("LumiRange") != -1:
                tmp_lumi_since = line.split()[1]
                tmp_lumi_till = line.split()[3]
            elif line.find("BeginTimeOfFit") == -1 and line.find("EndTimeOfFit") == -1 and line.find("LumiRange") == -1:
                # -z / --zlarge: force an enlarged sigmaZ into the payload
                if line.find("sigmaZ0") != -1 and option.zlarge:
                    line = "sigmaZ0 10\n"
                if line.find("Cov(3,j)") != -1 and option.zlarge:
                    line = "Cov(3,j) 0 0 0 2.5e-05 0 0 0\n"
                newtmpfile.write(line)
            # the all-IOVs file keeps every line, including the time/lumi
            # lines stripped from the per-payload temp file
            allfile.write(line)

        # pack run number and lumi section
        if IOVbase == "lumibase":
            timetype = "lumiid"
            iov_since = str( pack(int(tmp_run), int(tmp_lumi_since)) )
            iov_till = str( pack(int(tmp_run), int(tmp_lumi_till)) )
        # keep first iov for merged output metafile
        if nfile == 1:
            iov_since_first = iov_since

        tmpfile.close()
        newtmpfile.close()
        if option.copy:
            continue

        beam_file = beam_file_tmp

        if not writeSqliteFile(sqlite_file,tagname,timetype,beam_file,writedb_template,workflowdirTmp):
            print "An error occurred while writing the sqlite file: " + sqlite_file

        commands.getstatusoutput('rm -f ' + beam_file)
        ##### READ and check sqlite file
        readSqliteFile(sqlite_file,tagname,readdb_template,workflowdirTmp)

        #### Merge sqlite files
        if not os.path.isdir(workflowdirArchive + 'payloads'):
            os.mkdir(workflowdirArchive + 'payloads')

        print " merge sqlite file ..."
        appendSqliteFile("Combined.db", sqlite_file, tagname, iov_since, iov_till ,workflowdirTmp)

        # keep last payload for express, and prompt tags
        if nfile == total_files:
            print " this is the last IOV. You can use this payload for express and prompt conditions."
            os.system("cp "+sqlite_file+ " "+workflowdirArchive+"payloads/express.db")
            print "a copy of this payload has been placed at:"
            print workflowdirArchive+"payloads/express.db"

        # clean up
        os.system("rm "+ sqlite_file)
        print " clean up done."

    os.system("mv " + workflowdirTmp + "Combined.db " + workflowdirArchive + "payloads/")
    allfile.close()

    #### CREATE payload for merged output

    print " create MERGED payload card for dropbox ..."

    # metadata card accompanying Combined.db, in the dropbox key/value format
    sqlite_file = workflowdirArchive+'payloads/Combined.db'
    metadata_file = workflowdirArchive+'payloads/Combined.txt'
    dfile = open(metadata_file,'w')

    dfile.write('destDB '+ destDB +'\n')
    dfile.write('tag '+ tagname +'\n')
    dfile.write('inputtag' +'\n')
    dfile.write('since ' + iov_since_first +'\n')
    # dfile.write('till ' + iov_till +'\n')
    if IOVbase == "runbase":
        dfile.write('Timetype runnumber\n')
    elif IOVbase == "lumibase":
        dfile.write('Timetype lumiid\n')
    # express payloads are validated against the hlt tag
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext ' + iov_comment +'\n')

    dfile.close()

    # unique name for the dropbox upload: <tag>@<uuid>
    uuid = commands.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = tagname + '@' + uuid

    if not os.path.isdir(workflowdirArchive + 'payloads'):
        os.mkdir(workflowdirArchive + 'payloads')
    # archive a copy, then move the originals to lastPayloads/ for upload
    commands.getstatusoutput('cp ' + sqlite_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.db')
    commands.getstatusoutput('cp ' + metadata_file + ' ' + workflowdirArchive + 'payloads/' + final_sqlite_file_name + '.txt')

    commands.getstatusoutput('mv ' + sqlite_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.db')
    commands.getstatusoutput('mv ' + metadata_file + ' ' + workflowdirLastPayloads + final_sqlite_file_name + '.txt')

    print workflowdirLastPayloads + final_sqlite_file_name + '.db'
    print workflowdirLastPayloads + final_sqlite_file_name + '.txt'

    # -u / --upload: ship the payload pair to the offline dropbox
    if option.upload:
        print " scp files to offline Drop Box"
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"

        uploadSqliteFile(workflowdirLastPayloads,final_sqlite_file_name,dropbox)
def pack(high, low)
def copyToWorkflowdir(path)
def appendSqliteFile(combinedSqliteFileName, sqliteFileName, tagName, IOVSince, IOVTill, tmpDir="/tmp/")
void find(edm::Handle< EcalRecHitCollection > &hits, DetId thisDet, std::vector< EcalRecHitCollection::const_iterator > &hit, bool debug=false)
Definition: FindCaloHit.cc:20
def uploadSqliteFile(sqliteFileDirName, sqliteFileName, dropbox="/DropBox")
def readSqliteFile(sqliteFileName, tagName, sqliteTemplateFile, tmpDir="/tmp/")
def parse(path, config)
Definition: dumpparser.py:13
def writeSqliteFile(sqliteFileName, tagName, timeType, beamSpotFile, sqliteTemplateFile, tmpDir="/tmp/")
#define str(s)
double split
Definition: MVATrainer.cc:139