from __future__ import print_function
import os
class Matrix(dict):
    def __setitem__(self, key, value):
        if key in self:
            print("ERROR in Matrix")
            print("overwriting", key, "not allowed")
        else:
            self.update({key: value})

class Steps(dict):
    def __setitem__(self, key, value):
        if key in self:
            print("ERROR in Step")
            print("overwriting", key, "not allowed")
        else:
            self.update({key: value})

    def overwrite(self, keypair):
        value = self[keypair[1]]
        self.update({keypair[0]: value})
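# Usage note (illustrative): assigning to a key that already exists prints an
# error and leaves the stored value unchanged instead of silently overwriting it.
#
#   steps = Steps()
#   steps['RECO'] = {'-s': 'RECO'}
#   steps['RECO'] = {'-s': 'RECO,DQM'}   # prints "ERROR in Step" / "overwriting RECO not allowed"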
    def interpret(self, stepsDict):
        ...
        print('steps', s, stepsDict[s])
        steps.append(stepsDict[s])
def expandLsInterval(lumis):
    return range(lumis[0], lumis[1] + 1)
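# findFileInPath is used just below to locate the certification JSON files, but its
# definition is not part of this excerpt. A minimal sketch, assuming the usual
# CMSSW_SEARCH_PATH lookup convention, could look like this:
def findFileInPath(theFile):
    import os
    # scan each entry of CMSSW_SEARCH_PATH and return the first existing match
    for path in os.environ.get("CMSSW_SEARCH_PATH", "").split(":"):
        candidate = os.path.join(path, theFile)
        if os.path.exists(candidate):
            return candidate
    return None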
import json

jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")

with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
# return the portion of the golden json for the given runs: a dict mapping each run
# to its list of [first, last] lumisection segments, keeping at most ~maxNum
# lumisections in total (maxNum=-1 means no limit)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015):
    if not isinstance(list_runs[0], int):
        print("ERROR: list_runs must be a list of integers")
        return None

    local_dict = {}
    ls_count = 0

    for run in list_runs:
        if str(run) in l_json.keys():
            for LSsegment in l_json[str(run)]:
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                if (ls_count > maxNum) & (maxNum != -1):
                    break
                if run in local_dict.keys():
                    local_dict[run].append(LSsegment)
                else:
                    local_dict[run] = [LSsegment]
        else:
            print("run %s is NOT present in json %s\n\n" % (run, l_json))

    if len(local_dict) > 0:
        return local_dict
    else:
        print("No luminosity section interval passed the json and your selection; returning None")
        return None
InputInfoNDefault = 2000000

class InputInfo(object):
    def __init__(self, dataSet, dataSetParent='', label='', run=[], ls={}, files=1000,
                 events=InputInfoNDefault, split=10, location='CAF', ib_blacklist=None,
                 ib_block=None, skimEvents=False):
        self.run = run
        self.ls = ls
        self.files = files
        self.events = events
        self.location = location
        self.label = label
        self.dataSet = dataSet
        self.split = split
        self.ib_blacklist = ib_blacklist
        self.ib_block = ib_block
        self.dataSetParent = dataSetParent
        self.skimEvents = skimEvents

    def das(self, das_options, dataset):
        if len(self.run) != 0 or self.ls:
            queries = self.queries(dataset)
            if len(self.run) != 0:
                command = ";".join(["dasgoclient %s --query '%s'" % (das_options, query) for query in queries])
            else:
                lumis = self.lumis()
                commands = []
                while queries:
                    commands.append("dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
                command = ";".join(commands)
            command = "({0})".format(command)
        else:
            command = "dasgoclient %s --query '%s'" % (das_options, self.queries(dataset)[0])

        if self.skimEvents:
            from os import getenv
            if getenv("JENKINS_PREFIX") is not None:
                command = "das-up-to-nevents.py -d %s -e %d -pc" % (dataset, self.events)
            else:
                command = "das-up-to-nevents.py -d %s -e %d" % (dataset, self.events)

        # filter blacklisted entries out of the DAS output
        if self.ib_blacklist:
            command += " | grep -E -v "
            command += " ".join(["-e '{0}'".format(pattern) for pattern in self.ib_blacklist])

        from os import getenv
        if getenv("CMSSW_USE_IBEOS", "false") == "true":
            return command + " | ibeos-lfn-sort"
        return command + " | sort -u"

    def lumiRanges(self):
        if len(self.run) != 0:
            return "echo '{\n" + ",".join(('"%d":[[1,268435455]]\n' % (x,) for x in self.run)) + "}'"
        if self.ls:
            return "echo '{\n" + ",".join(('"%d" : %s\n' % (int(x), self.ls[x]) for x in self.ls.keys())) + "}'"
        return None

    def lumis(self):
        query_lumis = []
        for run in sorted(self.ls.keys()):
            run_lumis = []
            for rng in self.ls[run]:
                if isinstance(rng, int):
                    run_lumis.append(str(rng))
                else:
                    run_lumis.append(str(rng[0]) + "," + str(rng[1]))
            query_lumis.append(":".join(run_lumis))
        return query_lumis

    def queries(self, dataset):
        query_by = "block" if self.ib_block else "dataset"
        query_source = "{0}#{1}".format(dataset, self.ib_block) if self.ib_block else dataset

        if self.ls:
            return ["file {0}={1} run={2}".format(query_by, query_source, query_run) for query_run in sorted(self.ls.keys())]

        site = " site=T2_CH_CERN"
        if "CMSSW_DAS_QUERY_SITES" in os.environ:
            if os.environ["CMSSW_DAS_QUERY_SITES"]:
                site = " site=%s" % os.environ["CMSSW_DAS_QUERY_SITES"]

        if len(self.run) != 0:
            return ["file {0}={1} run={2}{3}".format(query_by, query_source, query_run, site) for query_run in self.run]
        return ["file {0}={1}{2}".format(query_by, query_source, site)]
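# Example (illustrative, hypothetical dataset and runs): restricting an InputInfo to
# specific runs makes queries() emit one DAS file query per run, which das() then
# chains into a single dasgoclient shell command.
#
#   ii = InputInfo('/RelValTTbar/CMSSW_X_Y_Z-GR_R_V1-v1/GEN-SIM', run=[256677, 256729])
#   ii.queries(ii.dataSet)
#   # ['file dataset=/RelValTTbar/... run=256677 site=T2_CH_CERN',
#   #  'file dataset=/RelValTTbar/... run=256729 site=T2_CH_CERN']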
def merge(dictlist, TELL=False):
    import copy
    last = len(dictlist) - 1
    if TELL: print(last, dictlist)
    if last == 0:
        # only one item left
        return copy.copy(dictlist[0])
    else:
        reducedlist = dictlist[0:max(0, last - 1)]
        if TELL: print(reducedlist)
        # make a copy of the last item
        d = copy.copy(dictlist[last])
        # update it with the last but one item
        d.update(dictlist[last - 1])
        # and recursively merge the rest
        reducedlist.append(d)
        return merge(reducedlist, TELL)
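# Example (illustrative): entries earlier in the list take precedence over later ones,
# so merge([{'-n': 10}, {'-n': 100, '--conditions': 'auto:run1_mc'}]) returns
# {'-n': 10, '--conditions': 'auto:run1_mc'}.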
def remove(d, key, TELL=False):
    import copy
    e = copy.deepcopy(d)
    if TELL: print("original dict, BEF: %s" % d)
    del e[key]
    if TELL: print("copy-removed dict, AFT: %s" % e)
    return e
#### Standard release validation samples ####

stCond = {'--conditions': 'auto:run1_mc'}

def Kby(N, s):
    return {'--relval': '%s000,%s' % (N, s)}

def Mby(N, s):
    return {'--relval': '%s000000,%s' % (N, s)}
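# Example (illustrative): Kby(9, 50) gives {'--relval': '9000,50'}, i.e. roughly a
# request for 9000 events split into jobs of 50 events each; Mby(1, 100) scales the
# same pattern to 1000000 events.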
def changeRefRelease(steps, listOfPairs):
    for s in steps:
        if 'INPUT' in steps[s]:
            oldD = steps[s]['INPUT'].dataSet
            for (ref, newRef) in listOfPairs:
                if ref in oldD:
                    steps[s]['INPUT'].dataSet = oldD.replace(ref, newRef)
        if '--pileup_input' in steps[s]:
            for (ref, newRef) in listOfPairs:
                if ref in steps[s]['--pileup_input']:
                    steps[s]['--pileup_input'] = steps[s]['--pileup_input'].replace(ref, newRef)
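# Example (illustrative, hypothetical release names): with
# listOfPairs=[('CMSSW_13_0_0', 'CMSSW_13_1_0')], an INPUT dataset such as
# '/RelValTTbar/CMSSW_13_0_0-.../GEN-SIM' becomes '/RelValTTbar/CMSSW_13_1_0-.../GEN-SIM',
# and the same substitution is applied to any '--pileup_input' string.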
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    import copy
    c = copy.copy(d)
    if suffix:
        c['-s'] = c['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        c['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        c['--filein'] = 'das:%s' % (dataSet,)
    c['cfg'] = fragment
    return c
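# Example (illustrative, hypothetical base dictionary d): genvalid('TTbar_13TeV_pythia8_cfi', d, fi=12345)
# copies d, renames the 'genvalid' sequence in '-s' to 'genvalid_all' (the default suffix)
# and points '--filein' to 'lhe:12345'.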
def check_dups(input):
    seen = set()
    dups = set(x for x in input if x in seen or seen.add(x))
    return dups
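# Example (illustrative): check_dups([1, 2, 2, 3, 3, 3]) returns {2, 3}, the set of
# values that occur more than once in the input sequence.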