4 print "ERROR in Matrix" 5 print "overwritting",key,
"not allowed" 17 print "overwritting",key,
"not allowed" 21 self.update({key:value})
26 value=self[keypair[1]]
27 self.update({keypair[0]:value})
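A minimal usage sketch (the step payloads are placeholder dictionaries, not real step definitions): both containers refuse to rebind an existing key, and Steps.overwrite copies an existing entry under a new name.

steps = Steps()
steps['step1'] = {'-s': 'GEN,SIM'}   # first assignment is stored
steps['step1'] = {'-s': 'DIGI'}      # rejected: prints "overwriting step1 not allowed"
steps.overwrite(('step1_alias', 'step1'))  # 'step1_alias' now maps to the 'step1' payload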
# From interpret(self, stepsDict): resolve each step name against the
# dictionary of known step definitions and collect the matching payloads.
print('steps', s, stepsDict[s])
steps.append(stepsDict[s])
def expandLsInterval(lumis):
    # expand an inclusive [first, last] pair into the full run of lumisections
    return range(lumis[0], lumis[1] + 1)
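For example, an inclusive interval expands to every lumisection it contains:

list(expandLsInterval([176, 179]))   # -> [176, 177, 178, 179]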
import json

jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")

with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
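These certification files map run numbers (as strings) to lists of inclusive lumisection ranges. A minimal sketch of the loaded shape (the run number and ranges below are made up for illustration):

# illustrative shape of the loaded data:
data_json2015 = {"273158": [[1, 103], [110, 1279]]}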
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015):
    if not isinstance(list_runs[0], int):
        print("ERROR: list_runs must be a list of integers")
        return None
    local_dict = {}
    ls_count = 0
    for run in list_runs:
        if str(run) in l_json.keys():
            for LSsegment in l_json[str(run)]:
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                if (ls_count > maxNum) and (maxNum != -1):
                    break
                if run in local_dict.keys():
                    local_dict[run].append(LSsegment)
                else:
                    local_dict[run] = [LSsegment]
        else:
            print("run %s is NOT present in json %s\n\n" % (run, l_json))
    if len(local_dict) > 0:
        return local_dict
    print("No luminosity section interval passed the json and your selection; returning None")
    return None
InputInfoNDefault = 2000000

class InputInfo(object):
    def __init__(self, dataSet, dataSetParent='', label='', run=[], ls={}, files=1000,
                 events=InputInfoNDefault, split=10, location='CAF',
                 ib_blacklist=None, ib_block=None):
        self.dataSet = dataSet
        self.dataSetParent = dataSetParent
        self.label = label
        self.run = run
        self.ls = ls
        self.files = files
        self.events = events
        self.split = split
        self.location = location
        self.ib_blacklist = ib_blacklist
        self.ib_block = ib_block
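    # A hypothetical instantiation, selecting two whole runs of an input
    # dataset (the dataset name and run numbers are illustrative only):
    #   info = InputInfo(dataSet='/RelValTTbar/CMSSW_X_Y_Z-GenSimFull/GEN-SIM',
    #                    run=[273158, 273302])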
    def das(self, das_options, dataset):
        if len(self.run) != 0 or self.ls:
            queries = self.queries(dataset)[:3]
            if len(self.run) != 0:
                command = ";".join(["dasgoclient %s --query '%s'" % (das_options, query)
                                    for query in queries])
            else:
                # query run by run, filtering each result by its lumisection list
                lumis = self.lumis()
                commands = []
                while queries:
                    commands.append("dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
                command = ";".join(commands)
            command = "({0})".format(command)
        else:
            command = "dasgoclient %s --query '%s'" % (das_options, self.queries(dataset)[0])
        # filter blacklisted files out of the DAS output
        if self.ib_blacklist:
            command += " | grep -E -v "
            command += " ".join(["-e '{0}'".format(pattern) for pattern in self.ib_blacklist])
        from os import getenv
        if getenv("CMSSW_USE_IBEOS", "false") == "true":
            return command + " | ibeos-lfn-sort"
        return command + " | sort -u"
    def lumiRanges(self):
        # emit a shell command that echoes a JSON lumi mask for the selection
        if len(self.run) != 0:
            return "echo '{\n" + ",".join('"%d":[[1,268435455]]\n' % (x,) for x in self.run) + "}'"
        if self.ls:
            return "echo '{\n" + ",".join('"%d" : %s\n' % (int(x), self.ls[x]) for x in self.ls.keys()) + "}'"
        return None

    def lumis(self):
        # render each run's lumisection ranges as "first,last[:first,last...]"
        query_lumis = []
        for run in self.ls.keys():
            run_lumis = []
            for rng in self.ls[run]:
                run_lumis.append(str(rng[0]) + "," + str(rng[1]))
            query_lumis.append(":".join(run_lumis))
        return query_lumis
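    # With ls={273158: [[1, 10], [20, 25]]} (made-up run and ranges):
    #   self.lumis()      -> ['1,10:20,25']
    #   self.lumiRanges() -> "echo '{\n\"273158\" : [[1, 10], [20, 25]]\n}'"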
    def queries(self, dataset):
        query_by = "block" if self.ib_block else "dataset"
        query_source = "{0}#{1}".format(dataset, self.ib_block) if self.ib_block else dataset
        if self.ls:
            return ["file {0}={1} run={2}".format(query_by, query_source, query_run)
                    for query_run in self.ls.keys()]
        if len(self.run) != 0:
            return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run)
                    for query_run in self.run]
        return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]
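With run=[273158] and no ib_block set, queries() resolves to a single dataset-scoped query per run (dataset name illustrative):

info.queries('/RelValTTbar/CMSSW_X_Y_Z-GenSimFull/GEN-SIM')
# -> ['file dataset=/RelValTTbar/CMSSW_X_Y_Z-GenSimFull/GEN-SIM run=273158 site=T2_CH_CERN']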
def merge(dictlist, TELL=False):
    import copy
    last = len(dictlist) - 1
    if TELL:
        print(last, dictlist)
    if last == 0:
        # only one dict left: we are done
        return copy.copy(dictlist[0])
    reducedlist = dictlist[0:max(0, last - 1)]
    if TELL:
        print(reducedlist)
    # copy the last dict, then let the one before it take precedence
    d = copy.copy(dictlist[last])
    d.update(dictlist[last - 1])
    reducedlist.append(d)
    return merge(reducedlist, TELL)
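Merging proceeds from the back of the list, so earlier dictionaries win on key clashes:

merge([{'-s': 'GEN'}, {'-s': 'SIM', '-n': 100}])
# -> {'-s': 'GEN', '-n': 100}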
def remove(d, key, TELL=False):
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print("original dict, BEF: %s" % d)
    del e[key]
    if TELL:
        print("copy-removed dict, AFT: %s" % e)
    return e
#### Standard release validation samples ####

stCond = {'--conditions': 'auto:run1_mc'}

def Kby(N, s):
    # shorthand for '--relval <N>000,<s>' (thousands of events)
    return {'--relval': '%s000,%s' % (N, s)}

def Mby(N, s):
    # companion helper: '--relval <N>000000,<s>' (millions of events)
    return {'--relval': '%s000000,%s' % (N, s)}
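For example:

Kby(9, 50)   # -> {'--relval': '9000,50'}
Mby(2, 100)  # -> {'--relval': '2000000,100'}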
def changeRefRelease(steps, listOfPairs):
    # rewrite reference-release substrings in input and pileup dataset names
    for s in steps:
        if 'INPUT' in steps[s]:
            oldD = steps[s]['INPUT'].dataSet
            for (ref, newRef) in listOfPairs:
                if ref in oldD:
                    steps[s]['INPUT'].dataSet = oldD.replace(ref, newRef)
        if '--pileup_input' in steps[s]:
            for (ref, newRef) in listOfPairs:
                if ref in steps[s]['--pileup_input']:
                    steps[s]['--pileup_input'] = steps[s]['--pileup_input'].replace(ref, newRef)
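A sketch of how it is meant to be called (release names illustrative):

changeRefRelease(steps, [('CMSSW_10_0_0', 'CMSSW_10_1_0')])
# every INPUT dataset and --pileup_input path mentioning the old release now points at the new one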
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    import copy
    c = copy.copy(d)
    if suffix:
        c['-s'] = c['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        c['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        c['--filein'] = 'das:%s' % (dataSet,)
    c['cfg'] = fragment
    return c
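A usage sketch building a genvalid step from a base command dictionary (the fragment and dataset names are illustrative):

step = genvalid('ZMM_13TeV_TuneCUETP8M1_cfi', {'-s': 'genvalid'}, suffix='dy', dataSet='/ZMM/CMSSW_X_Y_Z-v1/GEN')
# step['-s'] == 'genvalid_dy'; step['--filein'] == 'das:/ZMM/CMSSW_X_Y_Z-v1/GEN'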
Functions defined in this module:

def interpret(self, stepsDict)
def __setitem__(self, key, value)
def genvalid(fragment, d, suffix='all', fi='', dataSet='')
def overwrite(self, keypair)
def replace(string, replacements)
def findFileInPath(theFile)
def expandLsInterval(lumis)
def __setitem__(self, key, value)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015)
def remove(d, key, TELL=False)
def changeRefRelease(steps, listOfPairs)
def addOverride(self, key, override)
def Kby(N, s)
def addOverride(self, overrides)
def merge(dictlist, TELL=False)
How EventSelector::AcceptEvent() decides whether to accept an event for output:

- For a single or multiple positive criterion, the trigger will pass if any of the matching triggers are PASS or EXCEPTION. A criterion that matches no triggers at all is detected and causes a throw.
- For a single negative criterion, with the appropriate bit checking in the decision, the trigger will pass if any of the matching triggers are FAIL or EXCEPTION.
- A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", or "!HLTx*" if it matches 2 triggers or more) will accept the event only if all the matching triggers are FAIL; it will reject the event if any of the triggers are PASS or EXCEPTION. (This matches the behavior of "!*" before the partial-wildcard feature was incorporated.)
- Triggers which are in the READY state are completely ignored. (READY should never be returned once the trigger paths have been run.)
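A minimal Python sketch of the decision rule described above, assuming per-trigger states PASS, FAIL, EXCEPTION, and READY, and a criterion already resolved to the states of its matching triggers (the names and structure here are illustrative, not the EventSelector API):

PASS, FAIL, EXCEPTION, READY = 'PASS', 'FAIL', 'EXCEPTION', 'READY'

def accepts(criterion_is_negative, wildcard_match_count, states):
    # states: results of the triggers matched by one selection criterion
    states = [s for s in states if s != READY]   # READY is completely ignored
    if not states:
        raise ValueError("criterion matches no triggers")
    if not criterion_is_negative:
        # positive criterion: any PASS or EXCEPTION accepts
        return any(s in (PASS, EXCEPTION) for s in states)
    if wildcard_match_count > 1:
        # wildcarded negative: accept only if every match is FAIL
        return all(s == FAIL for s in states)
    # single negative: any FAIL or EXCEPTION accepts
    return any(s in (FAIL, EXCEPTION) for s in states)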