"""
A very complicated script to upload the results into the DB

usage: %prog -d <data file/directory> -t <tag name>
-c, --cfg = CFGFILE : use a configuration file different from the default
-l, --lock = LOCK : create a lock file so that only one instance of the script runs
-o, --overwrite : overwrite result files when copying
-T, --Test : upload files to the Test dropbox for data validation
-u, --upload : upload files to the offline dropbox via scp
-z, --zlarge : enlarge sigmaZ to 10 +/- 0.005 cm

Francisco Yumiceva (yumiceva@fnal.gov)
Lorenzo Uplegger (send an email to Francisco)
"""
from __future__ import print_function
from builtins import range
import os
import subprocess, re, time
import configparser as ConfigParser
import xmlrpc.client as xmlrpclib  # "import xmlrpclib" in the original Python 2 code
from BeamSpotObj import BeamSpot
from IOVObj import IOV
from CommonMethods import *
# simplejson is provided by the CRAB environment; fail with a clear message otherwise
try:
    import simplejson as json
except ImportError:
    error = "Please set a crab environment in order to get the proper JSON lib"
    exit(error)
    listIOVCommand = "cmscond_list_iov -c " + destDB + " -P /afs/cern.ch/cms/DB/conddb -t " + tagName
    dbError = subprocess.getstatusoutput(listIOVCommand)
    if dbError[0] != 0:
        if dbError[1].find("metadata entry \"" + tagName + "\" does not exist") != -1:
            print("Creating a new tag because I got the following error contacting the DB")
            # ...
        else:
            exit("ERROR: Can't connect to db because:\n" + dbError[1])

    # take the last IOV line of the listing and keep its first field
    aCommand = listIOVCommand + " | grep DB= | tail -1 | awk '{print $1}'"
    output = subprocess.getstatusoutput(aCommand)
    if output[1] == '':
        exit("ERROR: The tag " + tagName + " exists but I can't get the value of the last IOV")
    return int(output[1])
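
# --- fragment: getListOfFilesToProcess; queries DBS for the .root files of a dataset.
# Signature inferred from the body (lastRun seems to default to -1, meaning no run cut). ---
def getListOfFilesToProcess(dataSet, lastRun=-1):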
    queryCommand = "dbs --search --query \"find file where dataset=" + dataSet
    if lastRun != -1:
        queryCommand = queryCommand + " and run > " + str(lastRun)
    queryCommand = queryCommand + "\" | grep .root"
    output = subprocess.getstatusoutput(queryCommand)
    return output[1].split('\n')
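
# --- fragment: getNumberOfFilesToProcessForRun; counts the DBS files for a single run ---
def getNumberOfFilesToProcessForRun(dataSet, run):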
    queryCommand = "dbs --search --query \"find file where dataset=" + dataSet + " and run = " + str(run) + "\" | grep .root"
    output = subprocess.getstatusoutput(queryCommand)
    # ...
    return len(output[1].split('\n'))
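
# --- fragment: getListOfRunsAndLumiFromDBS; builds a {run: [lumi, ...]} map from DBS.
# Signature inferred from the body. ---
def getListOfRunsAndLumiFromDBS(dataSet, lastRun=-1):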
    datasetList = dataSet.split(',')
    outputList = []
    for data in datasetList:
        queryCommand = "dbs --search --query \"find run,lumi where dataset=" + data
        if lastRun != -1:
            queryCommand = queryCommand + " and run > " + str(lastRun)
        queryCommand = queryCommand + "\""
        print(" >> " + queryCommand)
        output = subprocess.getstatusoutput(queryCommand)
        if output[0] == 0 and not (output[1].find("ERROR") != -1 or output[1].find("Error") != -1):
            tmpList = output[1].split('\n')
            for file in tmpList:
                outputList.append(file)
        else:
            exit("ERROR: I can't contact DBS for the following reason:\n" + output[1])

    runsAndLumis = {}
    for out in outputList:
        regExp = re.search(r'(\d+)\s+(\d+)', out)
        if regExp:
            run = int(regExp.group(1))
            lumi = int(regExp.group(2))
            if run not in runsAndLumis:
                runsAndLumis[run] = []
            runsAndLumis[run].append(lumi)
    return runsAndLumis
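
# --- fragment: getListOfRunsAndLumiFromFile; reads the same {run: [lumis]} map from a JSON
# file. The firstRun parameter is inferred from the call site in the main workflow. ---
def getListOfRunsAndLumiFromFile(firstRun, fileName):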
    file = open(fileName)
    jsonFile = file.read()
    file.close()
    jsonList = json.loads(jsonFile)

    selected_dcs = {}
    for element in jsonList:
        selected_dcs[int(element)] = jsonList[element]
    return selected_dcs
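
# --- fragment: getListOfRunsAndLumiFromRR; fetches runs and their DCS-good lumi sections
# from the Run Registry ---
def getListOfRunsAndLumiFromRR(firstRun):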
    RunReg = "http://pccmsdqm04.cern.ch/runregistry"
    Group = "Collisions10"

    # get a handle to the Run Registry XML-RPC server
    FULLADDRESS = RunReg + "/xmlrpc"
    server = xmlrpclib.ServerProxy(FULLADDRESS)
    sel_runtable = "{groupName} ='" + Group + "' and {runNumber} > " + str(firstRun)

    tries = 0
    maxAttempts = 3  # retry budget (the exact value is elided in the fragment)
    while tries < maxAttempts:
        try:
            run_data = server.DataExporter.export('RUN', 'GLOBAL', 'csv_runs', sel_runtable)
            break
        except:
            print("Something wrong in accessing runregistry, retrying in 2s....", tries, "/", maxAttempts)
            tries += 1
            time.sleep(2)
    if tries == maxAttempts:
        error = "Run registry inaccessible...exiting now"
        exit(error)

    # the run registry apparently returns runs in descending order
    listOfRuns = []
    for line in run_data.split("\n"):
        run = line.split(',')[0]
        if run.isdigit():  # skip the CSV header (assumed guard; elided in the fragment)
            listOfRuns.append(run)

    firstRun = listOfRuns[-1]
    lastRun = listOfRuns[0]
    sel_dcstable = ("{groupName} ='" + Group + "' and {runNumber} >= " + str(firstRun) +
                    " and {runNumber} <= " + str(lastRun) +
                    " and {parDcsBpix} = 1 and {parDcsFpix} = 1 and {parDcsTibtid} = 1"
                    " and {parDcsTecM} = 1 and {parDcsTecP} = 1 and {parDcsTob} = 1"
                    " and {parDcsEbminus} = 1 and {parDcsEbplus} = 1"
                    " and {parDcsEeMinus} = 1 and {parDcsEePlus} = 1"
                    " and {parDcsEsMinus} = 1 and {parDcsEsPlus} = 1"
                    " and {parDcsHbheA} = 1 and {parDcsHbheB} = 1 and {parDcsHbheC} = 1"
                    " and {parDcsH0} = 1 and {parDcsHf} = 1")

    tries = 0
    while tries < maxAttempts:
        try:
            dcs_data = server.DataExporter.export('RUNLUMISECTION', 'GLOBAL', 'json', sel_dcstable)
            break
        except:
            print("I was able to get the list of runs and now I am trying to access the detector status, retrying in 2s....", tries, "/", maxAttempts)
            tries += 1
            time.sleep(2)
    if tries == maxAttempts:
        error = "Run registry inaccessible...exiting now"
        exit(error)

    selected_dcs = {}
    jsonList = json.loads(dcs_data)

    for element in listOfRuns:
        if element in jsonList:
            selected_dcs[int(element)] = jsonList[element]
        else:
            print("WARNING: Run " + element + " is a collision10 run with 0 lumis in Run Registry!")
            selected_dcs[int(element)] = [[]]
    return selected_dcs
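
# --- fragment: getLastClosedRun ---
def getLastClosedRun(DBSListOfFiles):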
    runs = []
    for file in DBSListOfFiles:
        runNumber = getRunNumberFromDBSName(file)
        if runs.count(runNumber) == 0:
            runs.append(runNumber)
    # ... (the list is presumably sorted here; the highest run may still be open,
    # so the one before it is returned)
    return int(runs[len(runs) - 2])
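
# --- fragment: getRunNumberFromFileName; the run number is the third '_'-separated field ---
def getRunNumberFromFileName(fileName):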
    regExp = re.search(r'(\D+)_(\d+)_(\d+)_', fileName)
    # ...
    return int(regExp.group(3))
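
# --- fragment: getRunNumberFromDBSName; rebuilds the run number from two path components ---
def getRunNumberFromDBSName(fileName):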
    regExp = re.search(r'(\D+)/(\d+)/(\d+)/(\d+)/(\D+)', fileName)
    # ...
    return int(regExp.group(3) + regExp.group(4))
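
# --- fragment: getNewRunList; keeps only result files newer than the last uploaded IOV ---
def getNewRunList(fromDir, lastUploadedIOV):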
    newRunList = []
    listOfFiles = ls(fromDir, ".txt")
    for fileName in listOfFiles:
        runNumber = getRunNumberFromFileName(fileName)
        if runNumber > lastUploadedIOV:
            newRunList.append(fileName)
    return newRunList

def selectFilesToProcess(listOfRunsAndLumiFromDBS, listOfRunsAndLumiFromRR, newRunList, runListDir, dataSet, mailList, dbsTolerance, dbsTolerancePercent, rrTolerance, missingFilesTolerance, missingLumisTimeout):
    runsAndLumisProcessed = {}
    runsAndFiles = {}
    for fileName in newRunList:
        file = open(runListDir + fileName)
        for line in file:
            if line.find("Runnumber") != -1:
                run = int(line.replace('\n', '').split(' ')[1])
            elif line.find("LumiRange") != -1:
                lumiLine = line.replace('\n', '').split(' ')
                begLumi = int(lumiLine[1])
                endLumi = int(lumiLine[3])
                if begLumi != endLumi:
                    error = "The lumi range is greater than 1 for run " + str(run) + " " + line + " in file: " + runListDir + fileName
                    sendEmail(mailList, error)
                    exit(error)
                if run not in runsAndLumisProcessed:
                    runsAndLumisProcessed[run] = []
                if begLumi in runsAndLumisProcessed[run]:
                    print("Lumi " + str(begLumi) + " in run " + str(run) + " already exists. This MUST not happen, but for now I will ignore this lumi!")
                runsAndLumisProcessed[run].append(begLumi)
        if run not in runsAndFiles:
            runsAndFiles[run] = []
        runsAndFiles[run].append(fileName)
        file.close()
    rrKeys = sorted(listOfRunsAndLumiFromRR.keys())
    # sorted list so that pop() returns the highest run (a keys() view has no pop() in Python 3)
    dbsKeys = sorted(listOfRunsAndLumiFromDBS.keys())
    lastUnclosedRun = dbsKeys.pop()
    procKeys = runsAndLumisProcessed.keys()
    filesToProcess = []
    for run in rrKeys:
        RRList = []
        for lumiRange in listOfRunsAndLumiFromRR[run]:
            if lumiRange != []:
                for l in range(lumiRange[0], lumiRange[1] + 1):
                    RRList.append(int(l))
        # consider only closed runs for which we already have result files
        if run in procKeys and run < lastUnclosedRun:
            if run not in dbsKeys and run != lastUnclosedRun:
                error = "Impossible, but run " + str(run) + " has been processed and it is also in the run registry, but it is not in DBS!"
                exit(error)
            nFiles = 0
            for data in dataSet.split(','):
                nFiles += getNumberOfFilesToProcessForRun(data, run)
            if len(runsAndFiles[run]) < nFiles:
                print("I haven't processed all files yet: " + str(len(runsAndFiles[run])) + " out of " + str(nFiles) + " for run: " + str(run))
                if nFiles - len(runsAndFiles[run]) <= missingFilesTolerance:
                    timeoutType = timeoutManager("DBS_MISMATCH_Run" + str(run), missingLumisTimeout)
                    if timeoutType == 1:
                        print("WARNING: I previously set a timeout that expired...I'll continue with the script even if I didn't process all the lumis!")
                    else:
                        if timeoutType == -1:
                            print("WARNING: Setting the DBS_MISMATCH_Run" + str(run) + " timeout because I haven't processed all files!")
                        else:
                            print("WARNING: Timeout DBS_MISMATCH_Run" + str(run) + " is in progress.")
                        return filesToProcess
                else:
                    timeoutType = timeoutManager("DBS_VERY_BIG_MISMATCH_Run" + str(run), missingLumisTimeout)
                    if timeoutType == 1:
                        error = "ERROR: I previously set a timeout that expired...I can't continue with the script because there are too many files missing (" + str(nFiles - len(runsAndFiles[run])) + ") and for too long (" + str(missingLumisTimeout / 3600) + " hours)! I will anyway process the runs before this one (" + str(run) + ")"
                        sendEmail(mailList, error)
                        return filesToProcess
                    else:
                        if timeoutType == -1:
                            print("WARNING: Setting the DBS_VERY_BIG_MISMATCH_Run" + str(run) + " timeout because I haven't processed all files!")
                        else:
                            print("WARNING: Timeout DBS_VERY_BIG_MISMATCH_Run" + str(run) + " is in progress.")
                        return filesToProcess
            else:
                print("I have processed " + str(len(runsAndFiles[run])) + " out of " + str(nFiles) + " files that are in DBS. So I should have all the lumis!")
            errors = []
            badProcessed = []
            # cross-check the processed lumis against DBS
            badDBSProcessed, badDBS = compareLumiLists(runsAndLumisProcessed[run], listOfRunsAndLumiFromDBS[run], errors)
            for i in range(0, len(errors)):
                errors[i] = errors[i].replace("listA", "the processed lumis")
                errors[i] = errors[i].replace("listB", "DBS")
            if len(badDBS) != 0:
                print("This is weird because I processed more lumis than the ones that are in DBS!")
            if len(badDBSProcessed) != 0 and run in rrKeys:
                lastError = len(errors)
                # cross-check the processed lumis against the Run Registry
                badRRProcessed, badRR = compareLumiLists(runsAndLumisProcessed[run], RRList, errors)
                for i in range(0, len(errors)):
                    errors[i] = errors[i].replace("listA", "the processed lumis")
                    errors[i] = errors[i].replace("listB", "Run Registry")
                if len(badRRProcessed) != 0:
                    print("I have not processed some of the lumis that are in the run registry for run: " + str(run))
                    for lumi in badDBSProcessed:
                        if lumi in badRRProcessed:
                            badProcessed.append(lumi)
                    lenA = len(badProcessed)
                    lenB = len(RRList)
                    if 100. * lenA / lenB <= dbsTolerancePercent:
                        print("WARNING: I didn't process " + str(100. * lenA / lenB) + "% of the lumis, but I am within the " + str(dbsTolerancePercent) + "% set in the configuration. This corresponds to " + str(lenA) + " out of " + str(lenB) + " lumis")
                        # ...
                    elif lenA <= dbsTolerance:
                        print("WARNING: I didn't process " + str(lenA) + " lumis, but I am within the " + str(dbsTolerance) + " lumis set in the configuration. This corresponds to " + str(lenA) + " out of " + str(lenB) + " lumis")
                        # ...
                    else:
                        error = "ERROR: For run " + str(run) + " I didn't process " + str(100. * lenA / lenB) + "% of the lumis and I am not within the " + str(dbsTolerancePercent) + "% set in the configuration. The number of lumis that I didn't process (" + str(lenA) + " out of " + str(lenB) + ") is also greater than the " + str(dbsTolerance) + " lumis that I can tolerate. I can't process runs >= " + str(run) + ", but I'll process the runs before!"
                        sendEmail(mailList, error)
                        return filesToProcess
            elif len(errors) != 0:
                print("The number of lumi sections processed didn't match the one in DBS, but they cover all the ones in the Run Registry, so it is ok!")
            # accept the files of this run only if no bad lumis remain
            if len(badProcessed) == 0:
                for file in runsAndFiles[run]:
                    filesToProcess.append(file)
            else:
                print("This should never happen, because if I have errors I return or exit! Run: " + str(run))
        else:
            error = "Run " + str(run) + " is in the run registry but it has not been processed yet!"
            print(error)
            timeoutType = timeoutManager("MISSING_RUNREGRUN_Run" + str(run), missingLumisTimeout)
            if timeoutType == 1:
                if len(RRList) <= rrTolerance:
                    error = "WARNING: I previously set the MISSING_RUNREGRUN_Run" + str(run) + " timeout that expired...I am missing run " + str(run) + " but it only had " + str(len(RRList)) + " <= " + str(rrTolerance) + " lumis. So I will continue and ignore it..."
                    sendEmail(mailList, error)
                else:
                    error = "ERROR: I previously set the MISSING_RUNREGRUN_Run" + str(run) + " timeout that expired...I am missing run " + str(run) + " which has " + str(len(RRList)) + " > " + str(rrTolerance) + " lumis. I can't continue, but I'll process the runs before this one"
                    sendEmail(mailList, error)
                    return filesToProcess
            else:
                if timeoutType == -1:
                    print("WARNING: Setting the MISSING_RUNREGRUN_Run" + str(run) + " timeout because I haven't processed a run!")
                else:
                    print("WARNING: Timeout MISSING_RUNREGRUN_Run" + str(run) + " is in progress.")
                return filesToProcess

    return filesToProcess
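
# --- fragment: compareLumiLists; signature and the pairing of the two returned lists are
# inferred from the call sites above ---
def compareLumiLists(listA, listB, errors, tolerance=0):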
    lenA = len(listA)
    lenB = len(listB)
    if lenA < lenB - (lenB * float(tolerance) / 100):
        errors.append("ERROR: The number of lumi sections is different: listA(" + str(lenA) + ")!=(" + str(lenB) + ")listB")
    # ...
    # badA collects lumis that are in listB but missing from listA, badB the opposite
    # (the bookkeeping is elided in the fragment; it is reconstructed from the call sites)
    badA = []
    badB = []
    for lumi in listA:
        if lumi not in listB:
            errors.append("Lumi (" + str(lumi) + ") is in listA but not in listB")
            badB.append(lumi)
    for lumi in listB:
        if lumi not in listA:
            errors.append("Lumi (" + str(lumi) + ") is in listB but not in listA")
            badA.append(lumi)
    return badA, badB
    processedRuns = {}
    for fileName in newRunList:
        run = getRunNumberFromFileName(fileName)
        if run not in processedRuns:
            processedRuns[run] = 0
        processedRuns[run] += 1

    for run in processedRuns.keys():
        nFiles = getNumberOfFilesToProcessForRun(dataSet, run)
        if processedRuns[run] < nFiles:
            print("I haven't processed all files yet: " + str(processedRuns[run]) + " out of " + str(nFiles) + " for run: " + str(run))
        else:
            print("All files have been processed for run: " + str(run) + " (" + str(processedRuns[run]) + " out of " + str(nFiles) + ")")
    selectedFiles = []
    runsToProcess = {}
    processedRuns = {}
    for file in listOfFilesToProcess:
        run = getRunNumberFromDBSName(file)
        if run not in runsToProcess:
            runsToProcess[run] = 1
        else:
            runsToProcess[run] = runsToProcess[run] + 1

    for file in newRunList:
        run = getRunNumberFromFileName(file)
        if run not in processedRuns:
            processedRuns[run] = 1
        else:
            processedRuns[run] = processedRuns[run] + 1

    # ...
    lastClosedRun = getLastClosedRun(listOfFilesToProcess)
    processedRunsKeys = sorted(processedRuns.keys())

    for run in processedRunsKeys:
        if run <= lastClosedRun:
            if run not in runsToProcess:
                exit("ERROR: I have a result file for run " + str(run) + " but it doesn't exist in DBS. Impossible but it happened!")
            print("For run " + str(run) + " I have processed " + str(processedRuns[run]) + " files and in DBS there are " + str(runsToProcess[run]) + " files!")
            lumiList = getDBSLumiListForRun(run)
            if processedRuns[run] == runsToProcess[run]:
                for file in newRunList:
                    if getRunNumberFromFileName(file) == run:
                        selectedFiles.append(file)
            else:
                exit("ERROR: For run " + str(run) + " I have processed " + str(processedRuns[run]) + " files but in DBS there are " + str(runsToProcess[run]) + " files!")
    return selectedFiles
    option, args = parse(__doc__)

    # with -l/--lock only one instance of the script may run at a time
    # (checkLock is assumed to come from CommonMethods)
    if option.lock and checkLock():
        print("There is already a megascript running...exiting")
        exit()

    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'

    cfgFile = "BeamSpotWorkflow.cfg"
    if option.cfg:
        cfgFile = option.cfg
    configurationFile = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/scripts/" + cfgFile
    configuration = ConfigParser.ConfigParser()
    print('Reading configuration from', configurationFile)
    configuration.read(configurationFile)

    sourceDir             = configuration.get('Common', 'SOURCE_DIR')
    archiveDir            = configuration.get('Common', 'ARCHIVE_DIR')
    workingDir            = configuration.get('Common', 'WORKING_DIR')
    databaseTag           = configuration.get('Common', 'DBTAG')
    dataSet               = configuration.get('Common', 'DATASET')
    fileIOVBase           = configuration.get('Common', 'FILE_IOV_BASE')
    dbIOVBase             = configuration.get('Common', 'DB_IOV_BASE')
    dbsTolerance          = float(configuration.get('Common', 'DBS_TOLERANCE'))
    dbsTolerancePercent   = float(configuration.get('Common', 'DBS_TOLERANCE_PERCENT'))
    rrTolerance           = float(configuration.get('Common', 'RR_TOLERANCE'))
    missingFilesTolerance = float(configuration.get('Common', 'MISSING_FILES_TOLERANCE'))
    missingLumisTimeout   = float(configuration.get('Common', 'MISSING_LUMIS_TIMEOUT'))
    jsonFileName          = configuration.get('Common', 'JSON_FILE')
    mailList              = configuration.get('Common', 'EMAIL')
    if not sourceDir.endswith('/'):
        sourceDir = sourceDir + '/'
    if not os.path.isdir(sourceDir):
        error = "ERROR: The source directory " + sourceDir + " doesn't exist!"
        sendEmail(mailList, error)
        exit(error)

    if not archiveDir.endswith('/'):
        archiveDir = archiveDir + '/'
    if not os.path.isdir(archiveDir):
        os.mkdir(archiveDir)

    if not workingDir.endswith('/'):
        workingDir = workingDir + '/'
    if not os.path.isdir(workingDir):
        os.mkdir(workingDir)
    else:
        os.system("rm -f " + workingDir + "*")

    print("Getting last IOV for tag: " + databaseTag)
    if destDB == "oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT":
        lastUploadedIOV = getLastUploadedIOV(databaseTag)
    # ... (the non-production case is elided in the fragment)
    if dbIOVBase == "lumiid":
        lastUploadedIOV = unpackLumiid(lastUploadedIOV)["run"]  # keep only the run part

    print("Getting list of files processed after IOV " + str(lastUploadedIOV))
    newProcessedRunList = getNewRunList(sourceDir, lastUploadedIOV)
    if len(newProcessedRunList) == 0:
        exit("There are no new runs after " + str(lastUploadedIOV))
    print("Copying files to archive directory")
    # the copy is attempted a few times (the retry count is elided in the fragment)
    for attempt in range(3):
        copiedFiles = cp(sourceDir, archiveDir, newProcessedRunList)
        if len(copiedFiles) == len(newProcessedRunList):
            break
    if len(copiedFiles) != len(newProcessedRunList):
        error = "ERROR: I can't copy more than " + str(len(copiedFiles)) + " files out of " + str(len(newProcessedRunList))
        sendEmail(mailList, error)
        exit(error)
    print("Getting list of files from DBS")
    listOfRunsAndLumiFromDBS = getListOfRunsAndLumiFromDBS(dataSet, lastUploadedIOV)
    if len(listOfRunsAndLumiFromDBS) == 0:
        exit("There are no files in DBS to process")
    print("Getting list of files from RR")
    listOfRunsAndLumiFromRR = getListOfRunsAndLumiFromRR(lastUploadedIOV)
    if not listOfRunsAndLumiFromRR:
        print("Looks like I can't get anything from the run registry, so I'll get the data from the json file " + jsonFileName)
        listOfRunsAndLumiFromRR = getListOfRunsAndLumiFromFile(lastUploadedIOV, jsonFileName)

    print("Getting list of files to process")
    selectedFilesToProcess = selectFilesToProcess(listOfRunsAndLumiFromDBS, listOfRunsAndLumiFromRR, copiedFiles, archiveDir, dataSet, mailList, dbsTolerance, dbsTolerancePercent, rrTolerance, missingFilesTolerance, missingLumisTimeout)
    if len(selectedFilesToProcess) == 0:
        exit("There are no files to process")
    print("Copying files from archive to working directory")
    # retried as well; on a partial copy the working directory is wiped before the next try
    for attempt in range(3):
        copiedFiles = cp(archiveDir, workingDir, selectedFilesToProcess)
        if len(copiedFiles) == len(selectedFilesToProcess):
            break
        subprocess.getstatusoutput("rm -rf " + workingDir)
    if len(copiedFiles) != len(selectedFilesToProcess):
        error = "ERROR: I can't copy more than " + str(len(copiedFiles)) + " files out of " + str(len(selectedFilesToProcess)) + " from " + archiveDir + " to " + workingDir
        sendEmail(mailList, error)
        exit(error)
    print("Sorting and cleaning beamlist")
    beamSpotObjList = []
    for fileName in copiedFiles:
        readBeamSpotFile(workingDir + fileName, beamSpotObjList, fileIOVBase)  # helper from CommonMethods (call reconstructed)
    sortAndCleanBeamList(beamSpotObjList, fileIOVBase)

    if len(beamSpotObjList) == 0:
        error = "WARNING: None of the processed and copied payloads has a valid fit, so there are no results. This shouldn't happen, since we are filtering using the run registry, so there should be at least one good run."
        sendEmail(mailList, error)
        exit(error)

    payloadFileName = "PayloadFile.txt"
    if dbIOVBase == "runnumber":
        payloadList = createWeightedPayloads(workingDir + payloadFileName, beamSpotObjList, False)  # call reconstructed
    else:
        payloadList = createWeightedPayloads(workingDir + payloadFileName, beamSpotObjList, True)
    if len(payloadList) == 0:
        error = "WARNING: I wasn't able to create any payload even though I have some BeamSpot objects."
        sendEmail(mailList, error)
        exit(error)
    tmpPayloadFileName = workingDir + "SingleTmpPayloadFile.txt"
    tmpSqliteFileName = workingDir + "SingleTmpSqliteFile.db"

    writeDBTemplate = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
    readDBTemplate = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
    payloadNumber = -1
    iovSinceFirst = '0'
    iovTillLast = '0'

    # all payloads are merged into a single sqlite file with a unique name
    uuid = subprocess.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = databaseTag + '@' + uuid
    sqlite_file = workingDir + final_sqlite_file_name + ".db"
    metadata_file = workingDir + final_sqlite_file_name + ".txt"
    for payload in payloadList:
        payloadNumber += 1
        if option.zlarge:
            payload.sigmaZ = 10
            payload.sigmaZerr = 2.5e-05  # 2.5e-05 = 0.005**2; the usage message quotes 10 +/- 0.005 cm
        tmpFile = open(tmpPayloadFileName, 'w')  # file(...) in the original Python 2 code
        dumpValues(payload, tmpFile)
        tmpFile.close()
        if not writeSqliteFile(tmpSqliteFileName, databaseTag, dbIOVBase, tmpPayloadFileName, writeDBTemplate, workingDir):
            error = "An error occurred while writing the sqlite file: " + tmpSqliteFileName
            sendEmail(mailList, error)
            exit(error)
        readSqliteFile(tmpSqliteFileName, databaseTag, readDBTemplate, workingDir)
        # set the IOV boundaries of this payload according to the configured time type
        if dbIOVBase == "runnumber":
            iov_since = str(payload.Run)
            iov_till = iov_since
        elif dbIOVBase == "lumiid":
            iov_since = str(pack(int(payload.Run), int(payload.IOVfirst)))
            iov_till = str(pack(int(payload.Run), int(payload.IOVlast)))
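        # pack() comes from CommonMethods: a lumiid IOV is a single 64-bit word with the run
        # number in the upper 32 bits and the lumi section in the lower 32; roughly
        # "def pack(high, low): return (high << 32) | low" (sketch of the assumed helper).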
        elif dbIOVBase == "timestamp":
            error = "ERROR: IOV " + dbIOVBase + " still not implemented."
            sendEmail(mailList, error)
            exit(error)
        else:
            error = "ERROR: IOV " + dbIOVBase + " unrecognized!"
            sendEmail(mailList, error)
            exit(error)

        if payloadNumber == 0:
            iovSinceFirst = iov_since
        if payloadNumber == len(payloadList) - 1:
            iovTillLast = iov_till

        appendSqliteFile(final_sqlite_file_name + ".db", tmpSqliteFileName, databaseTag, iov_since, iov_till, workingDir)
        os.system("rm -f " + tmpPayloadFileName + " " + tmpSqliteFileName)
    print(" create MERGED payload card for dropbox ...")

    dfile = open(metadata_file, 'w')
    dfile.write('destDB ' + destDB + '\n')
    dfile.write('tag ' + databaseTag + '\n')
    dfile.write('inputtag' + '\n')
    dfile.write('since ' + iovSinceFirst + '\n')
    dfile.write('Timetype ' + dbIOVBase + '\n')

    # the tag type is currently forced to "offline"
    tagType = "offline"
    print("WARNING TAG TYPE forced to be just offline")
    checkType = tagType
    if tagType == "express":
        checkType = "hlt"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext Beam spot position\n')
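
    # At this point the metadata card contains, roughly (sketch; tag and IOV values are
    # whatever the configuration produced):
    #   destDB oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT
    #   tag <DBTAG>
    #   inputtag
    #   since <iovSinceFirst>
    #   Timetype <DB_IOV_BASE>
    #   IOVCheck offline
    #   usertext Beam spot position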
    dfile.close()

    if option.upload:
        print(" scp files to offline Drop Box")
        dropbox = "/DropBox"
        if option.Test:
            dropbox = "/DropBox_test"
            print("UPLOADING TO TEST DB")
        uploadSqliteFile(workingDir, final_sqlite_file_name, dropbox)

    archive_sqlite_file_name = "Payloads_" + iovSinceFirst + "_" + iovTillLast + "_" + final_sqlite_file_name
    archive_results_file_name = "Payloads_" + iovSinceFirst + "_" + iovTillLast + "_" + databaseTag + ".txt"
    if not os.path.isdir(archiveDir + 'payloads'):
        os.mkdir(archiveDir + 'payloads')
    subprocess.getstatusoutput('mv ' + sqlite_file + ' ' + archiveDir + 'payloads/' + archive_sqlite_file_name + '.db')
    subprocess.getstatusoutput('mv ' + metadata_file + ' ' + archiveDir + 'payloads/' + archive_sqlite_file_name + '.txt')
    subprocess.getstatusoutput('cp ' + workingDir + payloadFileName + ' ' + archiveDir + 'payloads/' + archive_results_file_name)

    print(archiveDir + "payloads/" + archive_sqlite_file_name + '.db')
    print(archiveDir + "payloads/" + archive_sqlite_file_name + '.txt')

if __name__ == '__main__':
    main()