A very complicated script to upload the results into the DB
usage: %prog -d <data file/directory> -t <tag name>
-c, --cfg = CFGFILE : Use a different configuration file than the default
-l, --lock = LOCK : Create a lock file to have just one script running
-o, --overwrite : Overwrite results files when copying.
-T, --Test : Upload files to Test dropbox for data validation.
-u, --upload : Upload files to offline drop box via scp.
-z, --zlarge : Enlarge sigmaZ to 10 +/- 0.005 cm.
Francisco Yumiceva (yumiceva@fnal.gov)
Lorenzo Uplegger (send an email to Francisco)
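
A hypothetical example invocation (the tag name and data directory below are
placeholders, not production values):

   %prog -d ./BeamFitResults -t BeamSpotObjects_2010_LumiBased_v1_offline -u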
import commands, re, time
from BeamSpotObj import BeamSpot
from IOVObj import IOV
from CommonMethods import *
import simplejson as json
error = "Please set a crab environment in order to get the proper JSON lib"
    listIOVCommand = "cmscond_list_iov -c " + destDB + " -P /afs/cern.ch/cms/DB/conddb -t " + tagName
    dbError = commands.getstatusoutput( listIOVCommand )
    if dbError[1].find("metadata entry \"" + tagName + "\" does not exist") != -1:
        print "Creating a new tag because I got the following error contacting the DB"
    else:
        exit("ERROR: Can't connect to db because:\n" + dbError[1])
    aCommand = listIOVCommand + " | grep DB= | tail -1 | awk '{print $1}'"
    output = commands.getstatusoutput( aCommand )
    if output[1] == '':
        exit("ERROR: The tag " + tagName + " exists but I can't get the value of the last IOV")
    return long(output[1])
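
# Illustration (not part of the workflow itself): commands.getstatusoutput
# returns an (exit status, output text) tuple, which is why dbError[0]/output[0]
# carry the shell status and dbError[1]/output[1] the printed text. A minimal
# sketch:
#   status, text = commands.getstatusoutput("echo 132440")   # -> (0, "132440")
# so long(output[1]) above converts the last printed IOV value to a number.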
    queryCommand = "dbs --search --query \"find file where dataset=" + dataSet
    queryCommand = queryCommand + " and run > " + str(lastRun)
    queryCommand = queryCommand + "\" | grep .root"
    output = commands.getstatusoutput( queryCommand )
    return output[1].split('\n')
    queryCommand = "dbs --search --query \"find file where dataset=" + dataSet + " and run = " + str(run) + "\" | grep .root"
    output = commands.getstatusoutput( queryCommand )
    return len(output[1].split('\n'))
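
# Sketch of the command assembled above, with a hypothetical dataset and run
# (the dataset name is a placeholder):
#   dbs --search --query "find file where dataset=/MinimumBias/Run2010A-v1/RECO and run = 132440" | grep .root
# Counting the output lines gives the number of files DBS knows for that run.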
    datasetList = dataSet.split(',')
    for data in datasetList:
        queryCommand = "dbs --search --query \"find run,lumi where dataset=" + data
        queryCommand = queryCommand + " and run > " + str(lastRun)
        print " >> " + queryCommand
        output = commands.getstatusoutput( queryCommand )
        if output[0] == 0 and not (output[1].find("ERROR") != -1 or output[1].find("Error") != -1):
            tmpList = output[1].split('\n')
            for file in tmpList:
                outputList.append(file)
        else:
            exit("ERROR: I can't contact DBS for the following reason:\n" + output[1])
    for out in outputList:
        regExp = re.search('(\d+)\s+(\d+)',out)
        run = long(regExp.group(1))
        lumi = long(regExp.group(2))
        if run not in runsAndLumis:
            runsAndLumis[run] = []
        runsAndLumis[run].append(lumi)
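
    # Shape of the structure built above, with hypothetical runs and lumis:
    #   runsAndLumis = {132440L: [1L, 2L, 3L], 132596L: [10L, 11L]}
    # i.e. one entry per run, holding the lumi sections DBS reports for it.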
    file = open(fileName)
    jsonFile = file.read()
    jsonList = json.loads(jsonFile)
    for element in jsonList:
        selected_dcs[long(element)] = jsonList[element]
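
    # The JSON file is expected to map each run to its good lumi ranges, in the
    # same format the run registry exports; a hypothetical example:
    #   {"132440": [[1, 25], [27, 133]], "132596": [[1, 9]]}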
    RunReg = "http://pccmsdqm04.cern.ch/runregistry"
    Group = "Collisions10"
    FULLADDRESS = RunReg + "/xmlrpc"
    server = xmlrpclib.ServerProxy(FULLADDRESS)
    sel_runtable = "{groupName} ='" + Group + "' and {runNumber} > " + str(firstRun)
    while tries < maxAttempts:
        run_data = server.DataExporter.export('RUN', 'GLOBAL', 'csv_runs', sel_runtable)
        print "Something went wrong accessing the run registry, retrying in 2s....", tries, "/", maxAttempts
    if tries == maxAttempts:
        error = "Run registry inaccessible... exiting now"
    for line in run_data.split("\n"):
        run = line.split(',')[0]
        listOfRuns.append(run)
    firstRun = listOfRuns[len(listOfRuns)-1]
    lastRun = listOfRuns[0]
    sel_dcstable = "{groupName} ='" + Group + "' and {runNumber} >= " + str(firstRun) + " and {runNumber} <= " + str(lastRun) + " and {parDcsBpix} = 1 and {parDcsFpix} = 1 and {parDcsTibtid} = 1 and {parDcsTecM} = 1 and {parDcsTecP} = 1 and {parDcsTob} = 1 and {parDcsEbminus} = 1 and {parDcsEbplus} = 1 and {parDcsEeMinus} = 1 and {parDcsEePlus} = 1 and {parDcsEsMinus} = 1 and {parDcsEsPlus} = 1 and {parDcsHbheA} = 1 and {parDcsHbheB} = 1 and {parDcsHbheC} = 1 and {parDcsH0} = 1 and {parDcsHf} = 1"
    while tries < maxAttempts:
        dcs_data = server.DataExporter.export('RUNLUMISECTION', 'GLOBAL', 'json', sel_dcstable)
        print "I was able to get the list of runs and now I am trying to access the detector status, retrying in 2s....", tries, "/", maxAttempts
    if tries == maxAttempts:
        error = "Run registry inaccessible... exiting now"
    jsonList = json.loads(dcs_data)
    for element in listOfRuns:
        if element in jsonList:
            selected_dcs[long(element)] = jsonList[element]
211 print "WARNING: Run " + element +
" is a collision10 run with 0 lumis in Run Registry!"
212 selected_dcs[long(element)]= [[]]
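
    # After this loop selected_dcs holds one entry per selected run; a
    # hypothetical example of its content:
    #   selected_dcs = {132440L: [[1, 25], [27, 133]], 132598L: [[]]}
    # where [[]] marks a Collisions10 run with no lumis in the Run Registry.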
    for file in DBSListOfFiles:
        runNumber = getRunNumberFromDBSName(file)
        if runs.count(runNumber) == 0:
            runs.append(runNumber)
    return long(runs[len(runs)-2])
    regExp = re.search('(\D+)_(\d+)_(\d+)_',fileName)
    return long(regExp.group(3))
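
# Assuming result file names of the form <text>_<counter>_<run>_..., the third
# captured group is the run number; a hypothetical example:
#   getRunNumberFromFileName("BeamFit_Workflow_1_132440_.txt")   # -> 132440L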
    regExp = re.search('(\D+)/(\d+)/(\d+)/(\d+)/(\D+)',fileName)
    return long(regExp.group(3)+regExp.group(4))
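
# CMS LFNs split the run number across two directory levels, so the two digit
# groups are concatenated; a hypothetical example (placeholder path):
#   getRunNumberFromDBSName("/store/data/BeamCommissioning/MinimumBias/RECO/000/132/440/myfile.root")
#   # group(3)+group(4) = "132"+"440" -> 132440L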
    listOfFiles = ls(fromDir,".txt")
    for fileName in listOfFiles:
        runNumber = getRunNumberFromFileName(fileName)
        if runNumber > lastUploadedIOV:
            newRunList.append(fileName)
def selectFilesToProcess(listOfRunsAndLumiFromDBS,listOfRunsAndLumiFromRR,newRunList,runListDir,dataSet,mailList,dbsTolerance,dbsTolerancePercent,rrTolerance,missingFilesTolerance,missingLumisTimeout):
    runsAndLumisProcessed = {}
    for fileName in newRunList:
        file = open(runListDir+fileName)
            if line.find("Runnumber") != -1:
                run = long(line.replace('\n','').split(' ')[1])
            elif line.find("LumiRange") != -1:
                lumiLine = line.replace('\n','').split(' ')
                begLumi = long(lumiLine[1])
                endLumi = long(lumiLine[3])
                if begLumi != endLumi:
                    error = "The lumi range is greater than 1 for run " + str(run) + " " + line + " in file: " + runListDir + fileName
                if run not in runsAndLumisProcessed:
                    runsAndLumisProcessed[run] = []
                if begLumi in runsAndLumisProcessed[run]:
                    print "Lumi " + str(begLumi) + " in run " + str(run) + " already exists. This MUST not happen but right now I will ignore this lumi!"
                else:
                    runsAndLumisProcessed[run].append(begLumi)
        if run not in runsAndFiles:
            runsAndFiles[run] = []
        runsAndFiles[run].append(fileName)
    rrKeys = listOfRunsAndLumiFromRR.keys()
    dbsKeys = listOfRunsAndLumiFromDBS.keys()
    lastUnclosedRun = dbsKeys.pop()
    procKeys = runsAndLumisProcessed.keys()
    for run in rrKeys:
        RRList = []
        for lumiRange in listOfRunsAndLumiFromRR[run]:
            for l in range(lumiRange[0],lumiRange[1]+1):
                RRList.append(long(l))
        if run in procKeys and run < lastUnclosedRun:
            if run not in dbsKeys and run != lastUnclosedRun:
                error = "Impossible but run " + str(run) + " has been processed and it is also in the run registry but it is not in DBS!"
312 print "Working on run " + str(run)
314 for data
in dataSet.split(
','):
            if len(runsAndFiles[run]) < nFiles:
                print "I haven't processed all files yet: " + str(len(runsAndFiles[run])) + " out of " + str(nFiles) + " for run: " + str(run)
                if nFiles - len(runsAndFiles[run]) <= missingFilesTolerance:
                    timeoutType = timeoutManager("DBS_MISMATCH_Run"+str(run),missingLumisTimeout)
                    if timeoutType == 1:
                        print "WARNING: I previously set a timeout that expired...I'll continue with the script even if I didn't process all the lumis!"
                    else:
                        if timeoutType == -1:
                            print "WARNING: Setting the DBS_MISMATCH_Run" + str(run) + " timeout because I haven't processed all files!"
                        else:
                            print "WARNING: Timeout DBS_MISMATCH_Run" + str(run) + " is in progress."
                        return filesToProcess
                else:
                    timeoutType = timeoutManager("DBS_VERY_BIG_MISMATCH_Run"+str(run),missingLumisTimeout)
                    if timeoutType == 1:
                        error = "ERROR: I previously set a timeout that expired...I can't continue with the script because too many files (" + str(nFiles - len(runsAndFiles[run])) + ") have been missing for too long (" + str(missingLumisTimeout/3600) + " hours)! I will still process the runs before this one (" + str(run) + ")"
                        return filesToProcess
                    else:
                        if timeoutType == -1:
                            print "WARNING: Setting the DBS_VERY_BIG_MISMATCH_Run" + str(run) + " timeout because I haven't processed all files!"
                        else:
                            print "WARNING: Timeout DBS_VERY_BIG_MISMATCH_Run" + str(run) + " is in progress."
                        return filesToProcess
348 print "I have processed " + str(len(runsAndFiles[run])) +
" out of " + str(nFiles) +
" files that are in DBS. So I should have all the lumis!"
            badDBSProcessed,badDBS = compareLumiLists(runsAndLumisProcessed[run],listOfRunsAndLumiFromDBS[run],errors)
            for i in range(0,len(errors)):
                errors[i] = errors[i].replace("listA","the processed lumis")
                errors[i] = errors[i].replace("listB","DBS")
365 print "This is weird because I processed more lumis than the ones that are in DBS!"
366 if len(badDBSProcessed) != 0
and run
in rrKeys:
367 lastError = len(errors)
370 badRRProcessed,badRR =
compareLumiLists(runsAndLumisProcessed[run],RRList,errors)
                for i in range(0,len(errors)):
                    errors[i] = errors[i].replace("listA","the processed lumis")
                    errors[i] = errors[i].replace("listB","Run Registry")
                if len(badRRProcessed) != 0:
                    print "I have not processed some of the lumis that are in the run registry for run: " + str(run)
                    for lumi in badDBSProcessed:
                        if lumi in badRRProcessed:
                            badProcessed.append(lumi)
                    lenA = len(badProcessed)
                    lenB = len(RRList)
                    if 100.*lenA/lenB <= dbsTolerancePercent:
                        print "WARNING: I didn't process " + str(100.*lenA/lenB) + "% of the lumis but I am within the " + str(dbsTolerancePercent) + "% set in the configuration, which corresponds to " + str(lenA) + " out of " + str(lenB) + " lumis"
                    elif lenA <= dbsTolerance:
                        print "WARNING: I didn't process " + str(lenA) + " lumis but I am within the " + str(dbsTolerance) + " lumis set in the configuration, which corresponds to " + str(lenA) + " out of " + str(lenB) + " lumis"
                    else:
                        error = "ERROR: For run " + str(run) + " I didn't process " + str(100.*lenA/lenB) + "% of the lumis and I am not within the " + str(dbsTolerancePercent) + "% set in the configuration. The number of lumis that I didn't process (" + str(lenA) + " out of " + str(lenB) + ") is also greater than the " + str(dbsTolerance) + " lumis that I can tolerate. I can't process runs >= " + str(run) + " but I'll process the runs before!"
                        return filesToProcess
                elif len(errors) != 0:
                    print "The number of lumi sections processed didn't match the number in DBS, but they cover all the ones in the Run Registry, so it is ok!"
            if len(badProcessed) == 0:
                for file in runsAndFiles[run]:
                    filesToProcess.append(file)
410 print "This should never happen because if I have errors I return or exit! Run: " + str(run)
        else:
            error = "Run " + str(run) + " is in the run registry but it has not been processed yet!"
            timeoutType = timeoutManager("MISSING_RUNREGRUN_Run"+str(run),missingLumisTimeout)
            if timeoutType == 1:
                if len(RRList) <= rrTolerance:
                    error = "WARNING: I previously set the MISSING_RUNREGRUN_Run" + str(run) + " timeout that expired...I am missing run " + str(run) + " but it only had " + str(len(RRList)) + " <= " + str(rrTolerance) + " lumis. So I will continue and ignore it..."
                else:
                    error = "ERROR: I previously set the MISSING_RUNREGRUN_Run" + str(run) + " timeout that expired...I am missing run " + str(run) + " which has " + str(len(RRList)) + " > " + str(rrTolerance) + " lumis. I can't continue but I'll process the runs before this one"
                    return filesToProcess
            else:
                if timeoutType == -1:
                    print "WARNING: Setting the MISSING_RUNREGRUN_Run" + str(run) + " timeout because I haven't processed a run!"
                else:
                    print "WARNING: Timeout MISSING_RUNREGRUN_Run" + str(run) + " is in progress."
                return filesToProcess
    return filesToProcess
    if lenA < lenB-(lenB*float(tolerance)/100):
        errors.append("ERROR: The number of lumi sections is different: listA(" + str(lenA) + ")!=(" + str(lenB) + ")listB")
    for lumi in listA:
        if lumi not in listB:
            errors.append("Lumi (" + str(lumi) + ") is in listA but not in listB")
    for lumi in listB:
        if lumi not in listA:
            errors.append("Lumi (" + str(lumi) + ") is in listB but not in listA")
    for fileName in newRunList:
        run = getRunNumberFromFileName(fileName)
        if run not in processedRuns:
            processedRuns[run] = 0
        processedRuns[run] += 1
    for run in processedRuns.keys():
        if processedRuns[run] < nFiles:
            print "I haven't processed all files yet: " + str(processedRuns[run]) + " out of " + str(nFiles) + " for run: " + str(run)
        else:
            print "All files have been processed for run: " + str(run) + " (" + str(processedRuns[run]) + " out of " + str(nFiles) + ")"
    for file in listOfFilesToProcess:
        run = getRunNumberFromDBSName(file)
        if run not in runsToProcess:
            runsToProcess[run] = 1
        else:
            runsToProcess[run] = runsToProcess[run] + 1
    for file in newRunList:
        run = getRunNumberFromFileName(file)
        if run not in processedRuns:
            processedRuns[run] = 1
        else:
            processedRuns[run] = processedRuns[run] + 1
    processedRunsKeys = processedRuns.keys()
    processedRunsKeys.sort()
    for run in processedRunsKeys:
        if run <= lastClosedRun:
            print "For run " + str(run) + " I have processed " + str(processedRuns[run]) + " files and in DBS there are " + str(runsToProcess[run]) + " files!"
            if run not in runsToProcess:
                exit("ERROR: I have a result file for run " + str(run) + " but it doesn't exist in DBS. Impossible but it happened!")
            lumiList = getDBSLumiListForRun(run)
            if processedRuns[run] == runsToProcess[run]:
                for file in newRunList:
                    if getRunNumberFromFileName(file) == run:
                        selectedFiles.append(file)
            else:
                exit("ERROR: For run " + str(run) + " I have processed " + str(processedRuns[run]) + " files but in DBS there are " + str(runsToProcess[run]) + " files!")
    option,args = parse(__doc__)
    print "There is already a megascript running...exiting"
    destDB = 'oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT'
    if option.Test:
        destDB = 'oracle://cms_orcoff_prep/CMS_COND_BEAMSPOT'
    cfgFile = "BeamSpotWorkflow.cfg"
    configurationFile = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/scripts/" + cfgFile
    configuration = ConfigParser.ConfigParser()
    print 'Reading configuration from ', configurationFile
    configuration.read(configurationFile)
    sourceDir = configuration.get('Common','SOURCE_DIR')
    archiveDir = configuration.get('Common','ARCHIVE_DIR')
    workingDir = configuration.get('Common','WORKING_DIR')
    databaseTag = configuration.get('Common','DBTAG')
    dataSet = configuration.get('Common','DATASET')
    fileIOVBase = configuration.get('Common','FILE_IOV_BASE')
    dbIOVBase = configuration.get('Common','DB_IOV_BASE')
    dbsTolerance = float(configuration.get('Common','DBS_TOLERANCE'))
    dbsTolerancePercent = float(configuration.get('Common','DBS_TOLERANCE_PERCENT'))
    rrTolerance = float(configuration.get('Common','RR_TOLERANCE'))
    missingFilesTolerance = float(configuration.get('Common','MISSING_FILES_TOLERANCE'))
    missingLumisTimeout = float(configuration.get('Common','MISSING_LUMIS_TIMEOUT'))
    jsonFileName = configuration.get('Common','JSON_FILE')
    mailList = configuration.get('Common','EMAIL')
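
    # A hypothetical BeamSpotWorkflow.cfg matching the reads above (all values
    # are placeholders, not the production settings):
    #
    #   [Common]
    #   SOURCE_DIR              = /afs/cern.ch/cms/CAF/CMSCOMM/COMM_BSPOT/results/
    #   ARCHIVE_DIR             = /afs/cern.ch/cms/CAF/CMSCOMM/COMM_BSPOT/archive/
    #   WORKING_DIR             = /tmp/BeamSpotWorkflow/
    #   DBTAG                   = BeamSpotObjects_2010_v1_offline
    #   DATASET                 = /MinimumBias/Run2010A-v1/RECO
    #   FILE_IOV_BASE           = lumibase
    #   DB_IOV_BASE             = runnumber
    #   DBS_TOLERANCE           = 200
    #   DBS_TOLERANCE_PERCENT   = 5
    #   RR_TOLERANCE            = 10
    #   MISSING_FILES_TOLERANCE = 2
    #   MISSING_LUMIS_TIMEOUT   = 14400
    #   JSON_FILE               = beamspot_good_lumis.json
    #   EMAIL                   = yumiceva@fnal.gov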
    if sourceDir[len(sourceDir)-1] != '/':
        sourceDir = sourceDir + '/'
    if not os.path.isdir(sourceDir):
        error = "ERROR: The source directory " + sourceDir + " doesn't exist!"
    if archiveDir[len(archiveDir)-1] != '/':
        archiveDir = archiveDir + '/'
    if not os.path.isdir(archiveDir):
        os.mkdir(archiveDir)
    if workingDir[len(workingDir)-1] != '/':
        workingDir = workingDir + '/'
    if not os.path.isdir(workingDir):
        os.mkdir(workingDir)
    else:
        os.system("rm -f "+ workingDir + "*")
595 print "Getting last IOV for tag: " + databaseTag
597 if destDB ==
"oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT":
    if dbIOVBase == "lumiid":
        # keep only the run part of the packed run/lumi IOV
        lastUploadedIOV = unpack(lastUploadedIOV)[0]
608 print "Getting list of files processed after IOV " + str(lastUploadedIOV)
609 newProcessedRunList =
getNewRunList(sourceDir,lastUploadedIOV)
610 if len(newProcessedRunList) == 0:
611 exit(
"There are no new runs after " + str(lastUploadedIOV))
614 print "Copying files to archive directory"
617 copiedFiles =
cp(sourceDir,archiveDir,newProcessedRunList)
618 if len(copiedFiles) == len(newProcessedRunList):
620 if len(copiedFiles) != len(newProcessedRunList):
621 error =
"ERROR: I can't copy more than " + str(len(copiedFiles)) +
" files out of " + str(len(newProcessedRunList))
628 print "Getting list of files from DBS"
630 if len(listOfRunsAndLumiFromDBS) == 0:
631 exit(
"There are no files in DBS to process")
632 print "Getting list of files from RR"
634 if(
not listOfRunsAndLumiFromRR):
635 print "Looks like I can't get anything from the run registry so I'll get the data from the json file " + jsonFileName
643 print "Getting list of files to process"
644 selectedFilesToProcess =
selectFilesToProcess(listOfRunsAndLumiFromDBS,listOfRunsAndLumiFromRR,copiedFiles,archiveDir,dataSet,mailList,dbsTolerance,dbsTolerancePercent,rrTolerance,missingFilesTolerance,missingLumisTimeout)
645 if len(selectedFilesToProcess) == 0:
646 exit(
"There are no files to process")
650 print "Copying files from archive to working directory"
653 copiedFiles =
cp(archiveDir,workingDir,selectedFilesToProcess)
654 if len(copiedFiles) == len(selectedFilesToProcess):
657 commands.getstatusoutput(
"rm -rf " + workingDir)
658 if len(copiedFiles) != len(selectedFilesToProcess):
659 error =
"ERROR: I can't copy more than " + str(len(copiedFiles)) +
" files out of " + str(len(selectedFilesToProcess)) +
" from " + archiveDir +
" to " + workingDir
663 print "Sorting and cleaning beamlist"
665 for fileName
in copiedFiles:
    if len(beamSpotObjList) == 0:
        error = "WARNING: None of the processed and copied payloads has a valid fit, so there are no results. This shouldn't happen since we are filtering using the run registry, so there should be at least one good run."
    payloadFileName = "PayloadFile.txt"
    if dbIOVBase == "runnumber":
        # createWeightedPayloads is provided elsewhere in this package
        payloadList = createWeightedPayloads(workingDir+payloadFileName,beamSpotObjList,False)
    if len(payloadList) == 0:
        error = "WARNING: I wasn't able to create any payload even though I have some BeamSpot objects."
    tmpPayloadFileName = workingDir + "SingleTmpPayloadFile.txt"
    tmpSqliteFileName = workingDir + "SingleTmpSqliteFile.db"
    writeDBTemplate = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/write2DB_template.py"
    readDBTemplate = os.getenv("CMSSW_BASE") + "/src/RecoVertex/BeamSpotProducer/test/readDB_template.py"
    uuid = commands.getstatusoutput('uuidgen -t')[1]
    final_sqlite_file_name = databaseTag + '@' + uuid
    sqlite_file = workingDir + final_sqlite_file_name + ".db"
    metadata_file = workingDir + final_sqlite_file_name + ".txt"
    for payload in payloadList:
        if option.zlarge:
            # -z option: enlarge sigmaZ to 10 +/- 0.005 cm (error stored squared)
            payload.sigmaZ = 10
            payload.sigmaZerr = 2.5e-05
        tmpFile = file(tmpPayloadFileName,'w')
        if not writeSqliteFile(tmpSqliteFileName,databaseTag,dbIOVBase,tmpPayloadFileName,writeDBTemplate,workingDir):
            error = "An error occurred while writing the sqlite file: " + tmpSqliteFileName
        readSqliteFile(tmpSqliteFileName,databaseTag,readDBTemplate,workingDir)
        if dbIOVBase == "runnumber":
            iov_since = str(payload.Run)
            iov_till = str(payload.Run)
        elif dbIOVBase == "lumiid":
            iov_since = str( pack(int(payload.Run), int(payload.IOVfirst)) )
            iov_till = str( pack(int(payload.Run), int(payload.IOVlast)) )
        elif dbIOVBase == "timestamp":
            error = "ERROR: IOV " + dbIOVBase + " is not implemented yet."
        else:
            error = "ERROR: IOV " + dbIOVBase + " unrecognized!"
        if payloadNumber == 0:
            iovSinceFirst = iov_since
        if payloadNumber == len(payloadList)-1:
            iovTillLast = iov_till
        appendSqliteFile(final_sqlite_file_name + ".db", tmpSqliteFileName, databaseTag, iov_since, iov_till, workingDir)
        os.system("rm -f " + tmpPayloadFileName + " " + tmpSqliteFileName)
740 print " create MERGED payload card for dropbox ..."
742 dfile = open(metadata_file,
'w')
    dfile.write('destDB ' + destDB + '\n')
    dfile.write('tag ' + databaseTag + '\n')
    dfile.write('inputtag' + '\n')
    dfile.write('since ' + iovSinceFirst + '\n')
    dfile.write('Timetype ' + dbIOVBase + '\n')
753 print "WARNING TAG TYPE forced to be just offline"
    if tagType == "express":
        checkType = "express"
    dfile.write('IOVCheck ' + checkType + '\n')
    dfile.write('usertext Beam spot position\n')
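
    # Sketch of the payload card written above, with hypothetical tag and IOV
    # values (checkType depends on the tag type selected earlier):
    #   destDB oracle://cms_orcon_prod/CMS_COND_31X_BEAMSPOT
    #   tag BeamSpotObjects_2010_v1_offline
    #   inputtag
    #   since 132440
    #   Timetype runnumber
    #   IOVCheck offline
    #   usertext Beam spot position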
766 print " scp files to offline Drop Box"
769 dropbox =
"/DropBox_test"
770 print "UPLOADING TO TEST DB"
    archive_sqlite_file_name = "Payloads_" + iovSinceFirst + "_" + iovTillLast + "_" + final_sqlite_file_name
    archive_results_file_name = "Payloads_" + iovSinceFirst + "_" + iovTillLast + "_" + databaseTag + ".txt"
    if not os.path.isdir(archiveDir + 'payloads'):
        os.mkdir(archiveDir + 'payloads')
    commands.getstatusoutput('mv ' + sqlite_file + ' ' + archiveDir + 'payloads/' + archive_sqlite_file_name + '.db')
    commands.getstatusoutput('mv ' + metadata_file + ' ' + archiveDir + 'payloads/' + archive_sqlite_file_name + '.txt')
    commands.getstatusoutput('cp ' + workingDir + payloadFileName + ' ' + archiveDir + 'payloads/' + archive_results_file_name)
    print archiveDir + "payloads/" + archive_sqlite_file_name + '.db'
    print archiveDir + "payloads/" + archive_sqlite_file_name + '.txt'
if __name__ == '__main__':
    main()