1 from __future__
import print_function
5 print(
"ERROR in Matrix")
6 print(
"overwritting",key,
"not allowed")
17 print(
"ERROR in Step")
18 print(
"overwritting",key,
"not allowed")
22 self.update({key:value})
27 value=self[keypair[1]]
28 self.update({keypair[0]:value})
42 print(
'steps',s,stepsDict[s])
43 steps.append(stepsDict[s])
48 return range(lumis[0],(lumis[1]+1))
# Certified-luminosity ("golden") JSON files shipped with DPGAnalysis/Skims.
# findFileInPath (defined elsewhere in this file) resolves them on the
# CMSSW search path.
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")
jsonFile2016 = findFileInPath("DPGAnalysis/Skims/data/Cert_271036-274240_13TeV_PromptReco_Collisions16_JSON.txt")

# Parse each certification JSON once at import time; data_json2015 is the
# default `l_json` argument of selectedLS() below.
with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)

with open(jsonFile2016) as data_file:
    data_json2016 = json.load(data_file)
63 def selectedLS(list_runs=[],maxNum=-1,l_json=data_json2015):
65 if not isinstance(list_runs[0], int):
66 print(
"ERROR: list_runs must be a list of intergers")
72 if str(run)
in l_json.keys():
76 for LSsegment
in l_json[
str(run)] :
78 ls_count += (LSsegment[-1] - LSsegment[0] + 1)
79 if (ls_count > maxNum) & (maxNum != -1):
82 if runNumber
in local_dict.keys():
83 local_dict[runNumber].
append(LSsegment)
85 local_dict[runNumber] = [LSsegment]
89 print(
"run %s is NOT present in json %s\n\n"%(run, l_json))
92 if ( len(local_dict) > 0 ) :
95 print(
"No luminosity section interval passed the json and your selection; returning None")
103 InputInfoNDefault=2000000
105 def __init__(self,dataSet,dataSetParent='',label='',run=[],ls={},files=1000,events=InputInfoNDefault,split=10,location='CAF',ib_blacklist=None,ib_block=None) :
118 def das(self, das_options, dataset):
119 if len(self.
run)
is not 0
or self.
ls:
120 queries = self.
queries(dataset)[:3]
121 if len(self.
run) != 0:
122 command =
";".
join([
"dasgoclient %s --query '%s'" % (das_options, query)
for query
in queries])
127 commands.append(
"dasgoclient %s --query 'lumi,%s' --format json | das-selected-lumis.py %s " % (das_options, queries.pop(), lumis.pop()))
128 command =
";".
join(commands)
129 command =
"({0})".
format(command)
131 command =
"dasgoclient %s --query '%s'" % (das_options, self.
queries(dataset)[0])
135 command +=
" | grep -E -v " 137 from os
import getenv
138 if getenv(
"CMSSW_USE_IBEOS",
"false")==
"true":
return command +
" | ibeos-lfn-sort" 139 return command +
" | sort -u" 142 if len(self.
run) != 0:
143 return "echo '{\n"+
",".
join((
'"%d":[[1,268435455]]\n'%(x,)
for x
in self.
run))+
"}'" 145 return "echo '{\n"+
",".
join((
'"%d" : %s\n'%(
int(x),self.
ls[x])
for x
in self.ls.keys()))+
"}'" 151 for run
in self.ls.keys():
153 for rng
in self.
ls[run]: run_lumis.append(
str(rng[0])+
","+
str(rng[1]))
154 query_lumis.append(
":".
join(run_lumis))
158 query_by =
"block" if self.
ib_block else "dataset" 169 return [
"file {0}={1} run={2}".
format(query_by, query_source, query_run)
for query_run
in self.ls.keys()]
180 if len(self.
run)
is not 0:
181 return [
"file {0}={1} run={2} site=T2_CH_CERN".
format(query_by, query_source, query_run)
for query_run
in self.
run]
184 return [
"file {0}={1} site=T2_CH_CERN".
format(query_by, query_source)]
197 if TELL:
print(last,dictlist)
200 return copy.copy(dictlist[0])
202 reducedlist=dictlist[0:
max(0,last-1)]
203 if TELL:
print(reducedlist)
205 d=copy.copy(dictlist[last])
207 d.update(dictlist[last-1])
209 reducedlist.append(d)
210 return merge(reducedlist,TELL)
215 if TELL:
print(
"original dict, BEF: %s"%d)
217 if TELL:
print(
"copy-removed dict, AFT: %s"%e)
# Shared step options: standard run-1 MC conditions tag.
stCond = {'--conditions': 'auto:run1_mc'}
225 return {
'--relval':
'%s000,%s'%(N,s)}
227 return {
'--relval':
'%s000000,%s'%(N,s)}
231 if (
'INPUT' in steps[s]):
232 oldD=steps[s][
'INPUT'].dataSet
233 for (ref,newRef)
in listOfPairs:
235 steps[s][
'INPUT'].dataSet=oldD.replace(ref,newRef)
236 if '--pileup_input' in steps[s]:
237 for (ref,newRef)
in listOfPairs:
238 if ref
in steps[s][
'--pileup_input']:
239 steps[s][
'--pileup_input']=steps[s][
'--pileup_input'].
replace(ref,newRef)
246 def genvalid(fragment,d,suffix='all',fi='',dataSet=''):
250 c[
'-s']=c[
'-s'].
replace(
'genvalid',
'genvalid_'+suffix)
252 c[
'--filein']=
'lhe:%d'%(fi,)
254 c[
'--filein']=
'das:%s'%(dataSet,)
def interpret(self, stepsDict)
def __setitem__(self, key, value)
def genvalid(fragment, d, suffix='all', fi='', dataSet='')
def overwrite(self, keypair)
def replace(string, replacements)
std::ostream & print(std::ostream &os, JobReport::InputFile const &f)
def findFileInPath(theFile)
def expandLsInterval(lumis)
def __setitem__(self, key, value)
static std::string join(char **cmd)
def selectedLS(list_runs=[], maxNum=-1, l_json=data_json2015)
def remove(d, key, TELL=False)
def changeRefRelease(steps, listOfPairs)
def addOverride(self, key, override)
def Kby(N, s)
Standard release validation samples ####.
def addOverride(self, overrides)
How EventSelector::AcceptEvent() decides whether to accept an event for output; otherwise it is excluding the probing of: A single or multiple positive criteria — the trigger will pass if any such matching triggers are PASS or EXCEPTION. [A criterion that matches no triggers at all is detected and causes a throw.] A single negative criterion, with an expectation of appropriate bit checking in the decision — the trigger will pass if any such matching triggers are FAIL or EXCEPTION. A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", "!HLTx*" if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL. It will reject the event if any of the triggers are PASS or EXCEPTION (this matches the behavior of "!*" before the partial wildcard feature was incorporated). Triggers which are in the READY state are completely ignored. (READY should never be returned since the trigger paths have been run.)
def merge(dictlist, TELL=False)