4 print "ERROR in Matrix" 5 print "overwritting",key,
"not allowed" 17 print "overwritting",key,
"not allowed" 21 self.update({key:value})
26 value=self[keypair[1]]
27 self.update({keypair[0]:value})
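As an illustration only (the class name below is hypothetical and not taken from the listing), the overwrite-protection pattern amounts to a dict subclass that refuses to replace existing keys:

class ProtectedDict(dict):          # hypothetical name, for illustration
    def __setitem__(self, key, value):
        # refuse to overwrite, mirroring the behaviour shown above
        if key in self:
            print "ERROR: overwriting", key, "not allowed"
        else:
            self.update({key: value})

m = ProtectedDict()
m['wf1'] = 'step1+step2'
m['wf1'] = 'step3'   # prints the error and keeps the original value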
# interpret(self, stepsDict): resolve each step name into its definition
print 'steps', s, stepsDict[s]
steps.append(stepsDict[s])
# expandLsInterval(lumis): expand a [first, last] pair into the full list of lumisection numbers
return range(lumis[0], lumis[1] + 1)
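For example, a [first, last] pair expands to an inclusive list of lumisection numbers:

expandLsInterval([1, 5])    # -> [1, 2, 3, 4, 5]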
# locate the certified-lumisection JSON files shipped with DPGAnalysis/Skims and load them
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")

import json
with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015):
    # pick certified lumisection segments for the requested runs, up to maxNum lumisections
    if type(list_runs[0]) != int:
        print "ERROR: list_runs must be a list of integers"
        return None
    local_dict = {}
    ls_count = 0
    for run in list_runs:
        if str(run) in l_json.keys():
            for LSsegment in l_json[str(run)]:
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                if (ls_count > maxNum) & (maxNum != -1):
                    break
                if run in local_dict.keys():
                    local_dict[run].append(LSsegment)
                else:
                    local_dict[run] = [LSsegment]
        else:
            print "run %s is NOT present in json %s\n\n" % (run, l_json)
    if (len(local_dict.keys()) > 0):
        return local_dict
    print "No luminosity section interval passed the json and your selection; returning None"
    return None
InputInfoNDefault = 2000000

# constructor of the input-sample description (InputInfo) used by the relval workflows
def __init__(self, dataSet, label='', run=[], ls={}, files=1000, events=InputInfoNDefault, split=10, location='CAF', ib_blacklist=None, ib_block=None):
    def das(self, das_options):
        # build the shell command that fetches the list of input files from DAS
        if len(self.run) != 0 or self.ls:
            queries = self.queries()
            if len(self.run) != 0:
                # one dasgoclient call per file query, chained with ';'
                command = ";".join(["dasgoclient %s --query '%s'" % (das_options, query) for query in queries])
            else:
                # lumisection selection: pipe each lumi query through das-selected-lumis.py
                commands = []
                lumis = self.lumis()
                while queries:
                    commands.append("dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
                command = ";".join(commands)
            command = "({0})".format(command)
        else:
            command = "dasgoclient %s --query '%s'" % (das_options, self.queries()[0])
        # drop blacklisted entries (grep patterns from ib_blacklist, elided in this listing) and deduplicate
        command += " | grep -E -v "
        command += " | sort -u"
        return command

    # lumiRanges: echo a json snippet with the lumisection ranges to be processed
    def lumiRanges(self):
        if len(self.run) != 0:
            # whole runs requested: accept every lumisection of each run
            return "echo '{\n" + ",".join(('"%d":[[1,268435455]]\n' % (x,) for x in self.run)) + "}'"
        # explicit per-run lumisection lists
        return "echo '{\n" + ",".join(('"%d" : %s\n' % (int(x), self.ls[x]) for x in self.ls.keys())) + "}'"

    # lumis: one "first,last[:first,last...]" string per run, as expected by das-selected-lumis.py
    def lumis(self):
        query_lumis = []
        for run in self.ls.keys():
            run_lumis = []
            for rng in self.ls[run]: run_lumis.append(str(rng[0]) + "," + str(rng[1]))
            query_lumis.append(":".join(run_lumis))
        return query_lumis
    # queries: the DAS file queries, by block when an IB block is pinned, otherwise by dataset
    def queries(self):
        query_by = "block" if self.ib_block else "dataset"
        query_source = "{0}#{1}".format(self.dataSet, self.ib_block) if self.ib_block else self.dataSet
        if self.ls:
            return ["file {0}={1} run={2}".format(query_by, query_source, query_run) for query_run in self.ls.keys()]
        if len(self.run) != 0:
            return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run) for query_run in self.run]
        return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]
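A usage sketch under the reconstruction above (the class name InputInfo is inferred from InputInfoNDefault; the dataset string and run number are made up for illustration):

ri = InputInfo(dataSet='/SomePD/SomeProcessing/RECO', run=[273158])
ri.queries()
# -> ['file dataset=/SomePD/SomeProcessing/RECO run=273158 site=T2_CH_CERN']
ri.lumiRanges()
# -> an "echo" shell snippet of {"273158": [[1, 268435455]]}, i.e. all lumisections of the run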
def merge(dictlist, TELL=False):
    # merge a list of dicts into one; on key collisions, earlier entries take precedence
    import copy
    last = len(dictlist) - 1
    if TELL:
        print last, dictlist
    if last == 0:
        # only one dict left
        return copy.copy(dictlist[0])
    reducedlist = dictlist[0:max(0, last - 1)]
    if TELL:
        print reducedlist
    # merge the last two entries and recurse on the shortened list
    d = copy.copy(dictlist[last])
    d.update(dictlist[last - 1])
    reducedlist.append(d)
    return merge(reducedlist, TELL)

def remove(d, key, TELL=False):
    # return a copy of d with the given key removed
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print "original dict, BEF: %s" % d
    del e[key]
    if TELL:
        print "copy-removed dict, AFT: %s" % e
    return e
# common conditions flag for run1 MC
stCond = {'--conditions': 'auto:run1_mc'}

def Kby(N, s):
    # '--relval' value is "<total events>,<events per job>"; Kby counts the total in thousands
    return {'--relval': '%s000,%s' % (N, s)}

def Mby(N, s):
    # same, with the total counted in millions
    return {'--relval': '%s000000,%s' % (N, s)}
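Using the helpers as reconstructed above:

Kby(9, 50)     # -> {'--relval': '9000,50'}
Mby(2, 100)    # -> {'--relval': '2000000,100'}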
def changeRefRelease(steps, listOfPairs):
    # swap reference-release strings (ref -> newRef) in each step's input dataset and pileup input
    for s in steps:
        if ('INPUT' in steps[s]):
            oldD = steps[s]['INPUT'].dataSet
            for (ref, newRef) in listOfPairs:
                if ref in oldD:
                    steps[s]['INPUT'].dataSet = oldD.replace(ref, newRef)
        if '--pileup_input' in steps[s]:
            for (ref, newRef) in listOfPairs:
                if ref in steps[s]['--pileup_input']:
                    steps[s]['--pileup_input'] = steps[s]['--pileup_input'].replace(ref, newRef)
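A hypothetical example (the step name, dataset and release strings are illustrative only):

steps['ZMM2015'] = {'INPUT': InputInfo(dataSet='/RelValZMM/CMSSW_8_0_0-RECO')}
changeRefRelease(steps, [('CMSSW_8_0_0', 'CMSSW_8_0_1')])
# steps['ZMM2015']['INPUT'].dataSet is now '/RelValZMM/CMSSW_8_0_1-RECO'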
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    # customise a generator-validation step: tag the sequence with the suffix and,
    # when given, point --filein at an LHE article or a DAS dataset
    import copy
    c = copy.copy(d)
    if suffix:
        c['-s'] = c['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        c['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        c['--filein'] = 'das:%s' % (dataSet,)
    c['cfg'] = fragment
    return c
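A sketch of how it composes (the base dict, fragment name and dataset are hypothetical):

base = {'-s': 'GEN,VALIDATION:genvalid'}
genvalid('QCD_Pt-30_Fragment_cff', base, suffix='qcd', dataSet='/SomePD/SomeProc/GEN')
# -> {'-s': 'GEN,VALIDATION:genvalid_qcd', '--filein': 'das:/SomePD/SomeProc/GEN', 'cfg': 'QCD_Pt-30_Fragment_cff'}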
def interpret(self, stepsDict)
def __setitem__(self, key, value)
def genvalid(fragment, d, suffix='all', fi='', dataSet='')
def overwrite(self, keypair)
def replace(string, replacements)
def findFileInPath(theFile)
def expandLsInterval(lumis)
def __setitem__(self, key, value)
static std::string join(char **cmd)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015)
def remove(d, key, TELL=False)
def changeRefRelease(steps, listOfPairs)
def addOverride(self, key, override)
def Kby(N, s)
Standard release validation samples.
def addOverride(self, overrides)
def merge(dictlist, TELL=False)

How EventSelector::AcceptEvent() decides whether to accept an event for output:
A single or multiple positive criterion: the event will pass if any of the matching triggers are PASS or EXCEPTION. (A criterion that matches no triggers at all is detected and causes a throw.)
A single negative criterion, with the expectation of appropriate bit checking in the decision: the event will pass if any of the matching triggers are FAIL or EXCEPTION.
A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", or "!HLTx*" if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL. It will reject the event if any of the triggers are PASS or EXCEPTION (this matches the behavior of "!*" before the partial wildcard feature was incorporated).
Triggers which are in the READY state are completely ignored. (READY should never be returned, since the trigger paths have been run.)
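The decision rules above can be illustrated with a small Python sketch. This is not the actual C++ EventSelector implementation; the function name, the results dict, and the example trigger names are assumptions made for illustration.

import fnmatch

def accept_event(criteria, results):
    # results: trigger name -> 'PASS' | 'FAIL' | 'EXCEPTION' | 'READY'
    for criterion in criteria:
        negated = criterion.startswith('!')
        pattern = criterion[1:] if negated else criterion
        matches = [name for name in results if fnmatch.fnmatch(name, pattern)]
        if not matches:
            # a criterion that matches no triggers at all causes a throw
            raise RuntimeError("criterion %s matches no trigger" % criterion)
        # triggers in the READY state are completely ignored
        states = [results[name] for name in matches if results[name] != 'READY']
        if not negated:
            # positive criterion: accept if any matching trigger is PASS or EXCEPTION
            if any(s in ('PASS', 'EXCEPTION') for s in states):
                return True
        elif len(matches) == 1:
            # single negative criterion: accept if the matching trigger is FAIL or EXCEPTION
            if any(s in ('FAIL', 'EXCEPTION') for s in states):
                return True
        else:
            # wildcarded negative criterion matching 2+ triggers: accept only if all are FAIL
            if states and all(s == 'FAIL' for s in states):
                return True
    return False

# e.g. accept_event(['HLT_Mu*', '!HLT_Ele27'], {'HLT_Mu50': 'FAIL', 'HLT_Ele27': 'FAIL'}) -> True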