4 print "ERROR in Matrix" 5 print "overwritting",key,
"not allowed" 17 print "overwritting",key,
"not allowed" 21 self.update({key:value})
def overwrite(self, keypair):
    # alias keypair[0] to the value already stored under keypair[1]
    value = self[keypair[1]]
    self.update({keypair[0]: value})
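A minimal, self-contained sketch of the guard-plus-overwrite behaviour (the class name GuardedSteps is made up for illustration; the real classes are Matrix and Steps above):

class GuardedSteps(dict):
    def __setitem__(self, key, value):
        if key in self:
            print "overwriting", key, "not allowed"
        else:
            self.update({key: value})
    def overwrite(self, keypair):
        self.update({keypair[0]: self[keypair[1]]})

s = GuardedSteps()
s['GEN'] = {'-s': 'GEN'}
s['GEN'] = {'-s': 'other'}    # rejected: warning printed, original entry kept
s.overwrite(('GEN2', 'GEN'))  # 'GEN2' now maps to the same dict as 'GEN'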
def interpret(self, stepsDict):
    for s in self:
        print 'steps', s, stepsDict[s]
        steps.append(stepsDict[s])
def expandLsInterval(lumis):
    # inclusive [first, last] lumi-section interval -> explicit list
    return range(lumis[0], lumis[1] + 1)
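Both endpoints are included, e.g.:

expandLsInterval([3, 5])   # -> [3, 4, 5]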
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")
import json

with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015):
    if type(list_runs[0]) != int:
        print "ERROR: list_runs must be a list of integers"
        return None
    local_dict = {}
    ls_count = 0
    for run in list_runs:
        if str(run) in l_json.keys():
            for LSsegment in l_json[str(run)]:
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                if (ls_count > maxNum) & (maxNum != -1):
                    break
                if run in local_dict.keys():
                    local_dict[run].append(LSsegment)
                else:
                    local_dict[run] = [LSsegment]
        else:
            print "run %s is NOT present in json %s\n\n" % (run, l_json)
    if len(local_dict.keys()) > 0:
        return local_dict
    print "No luminosity section interval passed the json and your selection; returning None"
    return None

InputInfoNDefault = 2000000
def __init__(self, dataSet, dataSetParent='', label='', run=[], ls={}, files=1000, events=InputInfoNDefault, split=10, location='CAF', ib_blacklist=None, ib_block=None):
def das(self, das_options, dataset):
    if len(self.run) != 0 or self.ls:
        queries = self.queries(dataset)[:3]
        if len(self.run) != 0:
            command = ";".join(["dasgoclient %s --query '%s'" % (das_options, query)
                                for query in queries])
        else:
            # ... one piped query per run: list files, then filter their lumis ...
            commands.append("dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
            command = ";".join(commands)
        command = "({0})".format(command)
    else:
        command = "dasgoclient %s --query '%s'" % (das_options, self.queries(dataset)[0])
    # ... optional blacklist filter on the DAS output ...
    command += " | grep -E -v "
    # ... de-duplicate the resulting file list ...
    command += " | sort -u"

def lumiRanges(self):
    # run selection: accept the full lumi range of each listed run
    if len(self.run) != 0:
        return "echo '{\n" + ",".join(('"%d":[[1,268435455]]\n' % (x,) for x in self.run)) + "}'"
    # ... otherwise echo the explicit per-run lumi segments ...
    return "echo '{\n" + ",".join(('"%d" : %s\n' % (int(x), self.ls[x]) for x in self.ls.keys())) + "}'"

def lumis(self):
    query_lumis = []
    for run in self.ls.keys():
        run_lumis = []
        for rng in self.ls[run]:
            run_lumis.append(str(rng[0]) + "," + str(rng[1]))
        query_lumis.append(":".join(run_lumis))
    return query_lumis
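A sketch of the strings these helpers produce, assuming they are methods of an InputInfo class built around the __init__ above (the dataset name is invented):

info = InputInfo('/RelValTTbar/CMSSW_X-v1/GEN-SIM', ls={273158: [[1, 10], [20, 25]]})
info.lumis()        # -> ['1,10:20,25']   (one 'first,last[:first,last]' string per run)
info.lumiRanges()   # -> "echo '{\n\"273158\" : [[1, 10], [20, 25]]\n}'"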
def queries(self, dataset):
    query_by = "block" if self.ib_block else "dataset"
    query_source = "{0}#{1}".format(dataset, self.ib_block) if self.ib_block else dataset
    if self.ls:
        return ["file {0}={1} run={2}".format(query_by, query_source, query_run)
                for query_run in self.ls.keys()]
    if len(self.run) != 0:
        return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run)
                for query_run in self.run]
    return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]
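The three query shapes, for an invented dataset (setting ib_block switches the selector from 'dataset' to 'block'; InputInfo as above):

InputInfo('/X/Y/Z', run=[1, 2]).queries('/X/Y/Z')
# -> ['file dataset=/X/Y/Z run=1 site=T2_CH_CERN', 'file dataset=/X/Y/Z run=2 site=T2_CH_CERN']
InputInfo('/X/Y/Z').queries('/X/Y/Z')
# -> ['file dataset=/X/Y/Z site=T2_CH_CERN']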
def merge(dictlist, TELL=False):
    import copy
    last = len(dictlist) - 1
    if TELL:
        print last, dictlist
    if last == 0:
        # only one item left: nothing to merge
        return copy.copy(dictlist[0])
    # merge the last two dicts, then recurse on the shortened list
    reducedlist = dictlist[0:max(0, last - 1)]
    if TELL:
        print reducedlist
    d = copy.copy(dictlist[last])
    d.update(dictlist[last - 1])
    reducedlist.append(d)
    return merge(reducedlist, TELL)
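Because each pass copies the last dict and updates it with the one before, earlier dicts in the list win on key clashes:

merge([{'--conditions': 'auto:run1_mc'}, {'--conditions': 'other', '-n': 100}])
# -> {'--conditions': 'auto:run1_mc', '-n': 100}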
def remove(d, key, TELL=False):
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print "original dict, BEF: %s" % d
    del e[key]
    if TELL:
        print "copy-removed dict, AFT: %s" % e
    return e
stCond = {'--conditions': 'auto:run1_mc'}

def Kby(N, s):
    # events in thousands: '<N>000,<s>'
    return {'--relval': '%s000,%s' % (N, s)}

def Mby(N, s):
    # events in millions: '<N>000000,<s>'
    return {'--relval': '%s000000,%s' % (N, s)}
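The two helpers differ only in the scale factor applied to N:

Kby(9, 50)    # -> {'--relval': '9000,50'}
Mby(3, 100)   # -> {'--relval': '3000000,100'}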
def changeRefRelease(steps, listOfPairs):
    for s in steps:
        if 'INPUT' in steps[s]:
            oldD = steps[s]['INPUT'].dataSet
            for (ref, newRef) in listOfPairs:
                if ref in oldD:
                    steps[s]['INPUT'].dataSet = oldD.replace(ref, newRef)
        if '--pileup_input' in steps[s]:
            for (ref, newRef) in listOfPairs:
                if ref in steps[s]['--pileup_input']:
                    steps[s]['--pileup_input'] = steps[s]['--pileup_input'].replace(ref, newRef)
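A sketch of the substitution on a one-step dict (workflow name and datasets invented; InputInfo as defined above):

steps = {'RunMinBias': {'INPUT': InputInfo('/MinBias/CMSSW_10_0_0-v1/GEN-SIM')}}
changeRefRelease(steps, [('CMSSW_10_0_0', 'CMSSW_10_1_0')])
steps['RunMinBias']['INPUT'].dataSet   # -> '/MinBias/CMSSW_10_1_0-v1/GEN-SIM'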
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    import copy
    c = copy.copy(d)
    if suffix:
        c['-s'] = c['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        c['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        c['--filein'] = 'das:%s' % (dataSet,)
    # ...
    return c
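For example, assuming d carries the usual '-s' step string (the fragment name here is invented):

base = {'-s': 'GEN,VALIDATION:genvalid'}
genvalid('SomeGen_cfi', base, suffix='qcd')['-s']   # -> 'GEN,VALIDATION:genvalid_qcd'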
def interpret(self, stepsDict)
def __setitem__(self, key, value)
def genvalid(fragment, d, suffix='all', fi='', dataSet='')
def overwrite(self, keypair)
def replace(string, replacements)
def findFileInPath(theFile)
def expandLsInterval(lumis)
def __setitem__(self, key, value)
static std::string join(char **cmd)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015)
def remove(d, key, TELL=False)
def changeRefRelease(steps, listOfPairs)
def addOverride(self, key, override)
def Kby(N, s)
#### Standard release validation samples ####
def addOverride(self, overrides)
How EventSelector::AcceptEvent() decides whether to accept an event for output (excluding the probing cases):
- A single or multiple positive criterion: the trigger will pass if any such matching triggers are PASS or EXCEPTION. [A criterion that matches no triggers at all is detected and causes a throw.]
- A single negative criterion, with the expectation of appropriate bit checking in the decision: the trigger will pass if any such matching triggers are FAIL or EXCEPTION.
- A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", "!HLTx*" if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL. It will reject the event if any of the triggers are PASS or EXCEPTION (this matches the behavior of "!*" before the partial wildcard feature was incorporated).
- Triggers which are in the READY state are completely ignored. (READY should never be returned since the trigger paths have been run.)
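A rough Python paraphrase of those rules (illustrative only, not the C++ implementation; 'PASS', 'FAIL', 'EXCEPTION', 'READY' stand in for the framework's trigger states):

def accept_event(criterion, matched_states):
    # matched_states: states of the triggers this criterion matched
    states = [s for s in matched_states if s != 'READY']   # READY is ignored
    if not criterion.startswith('!'):
        # positive criterion: any PASS or EXCEPTION accepts the event
        return any(s in ('PASS', 'EXCEPTION') for s in states)
    if len(states) > 1:
        # wildcarded negative matching 2+ triggers: all must be FAIL
        return all(s == 'FAIL' for s in states)
    # single negative: any FAIL or EXCEPTION accepts the event
    return any(s in ('FAIL', 'EXCEPTION') for s in states)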
def merge(dictlist, TELL=False)