CMS 3D CMS Logo

All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
MatrixUtil.py
Go to the documentation of this file.
class Matrix(dict):
    """Mapping of workflow number -> WF, refusing accidental overwrites.

    Keys are normalized to float workflow numbers; assigned values are
    wrapped in a WF object (defined later in this file).
    """

    def __setitem__(self, key, value):
        # Refuse to silently replace an existing workflow definition.
        # (print() with a single string argument works on both py2 and py3;
        # the original py2-only print statements broke py3 imports.)
        if key in self:
            print("ERROR in Matrix")
            print("overwriting %s not allowed" % (key,))  # fixed 'overwritting' typo
        else:
            self.update({float(key): WF(float(key), value)})

    def addOverride(self, key, override):
        # Delegate to the WF object stored under this workflow number.
        self[key].addOverride(override)
#the class to collect all possible steps
class Steps(dict):
    """Collection of all possible steps, keyed by step name.

    Redefining an existing step is treated as a fatal configuration error
    and aborts the process.
    """

    def __setitem__(self, key, value):
        if key in self:
            # A duplicate step name means a broken matrix definition:
            # abort hard rather than silently clobbering the first one.
            print("ERROR in Step")
            print("overwriting %s not allowed" % (key,))  # fixed 'overwritting' typo
            import sys
            sys.exit(-9)
        else:
            self.update({key: value})
            # make the python file named <step>.py
            #if not '--python' in value: self[key].update({'--python':'%s.py'%(key,)})

    def overwrite(self, keypair):
        # Register keypair[0] as an alias of the step stored under keypair[1].
        value = self[keypair[1]]
        self.update({keypair[0]: value})
class WF(list):
    """A single workflow: a list of step names plus workflow metadata."""

    def __init__(self, n, l):
        self.extend(l)
        self.num = n          # workflow number
        self.steps = []       # the actual (interpreted) steps of this WF
        self.overrides = {}   # per-step command-line overrides

    def addOverride(self, overrides):
        self.overrides = overrides

    def interpret(self, stepsDict):
        # Resolve each step name of this workflow into its full definition.
        for s in self:
            print('steps %s %s' % (s, stepsDict[s]))
            # BUGFIX: the original appended to an undefined global 'steps',
            # raising NameError at runtime; the intent is clearly self.steps.
            self.steps.append(stepsDict[s])
44 
45 
def expandLsInterval(lumis):
    """Expand an inclusive [first, last] lumi-section pair into a range."""
    first, last = lumis[0], lumis[1]
    return range(first, last + 1)
48 
# Locate the 2015 golden-JSON certification file on the release path.
# NOTE(review): findFileInPath is not defined in this chunk — presumably
# imported on a line not shown here; confirm against the full file.
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_246908-XXXXXX_13TeV_PromptReco_Collisions15_JSON.txt")

# Module-level side effect: the certification JSON is parsed once at import
# time and cached in data_json, used as the default selection in selectedLS.
import json
with open(jsonFile2015) as data_file:
    data_json = json.load(data_file)
55 
# return a portion of the 2015 golden json
# LS for a full run by default; otherwise a subset of which you determined the size
def selectedLS(list_runs=[], maxNum=-1, l_json=None):
    """Return {run: [[ls_first, ls_last], ...]} for the requested runs.

    By default all LS segments of each listed run are kept; if maxNum >= 0
    the collection stops once more than maxNum lumi sections have been seen
    (the segment that crosses the budget is dropped). Returns None on bad
    input or when nothing passes the selection.
    """
    # Resolve the json default lazily instead of binding data_json at import
    # time (the original default froze whatever data_json was at definition).
    if l_json is None:
        l_json = data_json
    # print "maxNum is %s"%(maxNum)
    # Robustness: an empty run list is user error, like non-integer entries.
    if not list_runs or type(list_runs[0]) != int:
        print("ERROR: list_runs must be a list of integers")  # fixed 'intergers' typo
        return None
    local_dict = {}
    ls_count = 0

    for run in list_runs:
        if str(run) in l_json.keys():
            # print "run %s is there"%(run)
            runNumber = run
            for LSsegment in l_json[str(run)]:
                print(LSsegment)
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                # Stop (before appending) once the budget is exceeded.
                # Fixed: use logical 'and' instead of bitwise '&' on booleans.
                if (ls_count > maxNum) and (maxNum != -1):
                    break
                if runNumber in local_dict.keys():
                    local_dict[runNumber].append(LSsegment)
                else:
                    local_dict[runNumber] = [LSsegment]
                # print "total LS so far %s - grow %s"%(ls_count,local_dict)
        else:
            print("run %s is NOT there\n\n" % (run,))
    # print "++ %s"%(local_dict)

    if len(local_dict.keys()) > 0:
        return local_dict
    else:
        print("No luminosity section interval passed the json and your selection; returning None")
        return None
91 
92 # print "\n\n\n THIS IS WHAT I RETURN: %s \n\n"%( selectedLS([251244,251251]) )
93 
94 
95 
96 
97 InputInfoNDefault=2000000
99  def __init__(self,dataSet,label='',run=[],ls={},files=1000,events=InputInfoNDefault,split=10,location='CAF',ib_blacklist=None,ib_block=None) :
100  self.run = run
101  self.ls = ls
102  self.files = files
103  self.events = events
104  self.location = location
105  self.label = label
106  self.dataSet = dataSet
107  self.split = split
108  self.ib_blacklist = ib_blacklist
109  self.ib_block = ib_block
110 
111  def das(self, das_options):
112  if len(self.run) is not 0 or self.ls:
113  # take at most 5 queries, to avoid sinking das
114 
115  # do if you have LS queries
116  # command = ";".join(["das_client.py %s --query '%s'" % (das_options, query) for query in self.queries()[:3] ])
117  command = ";".join(["das_client.py %s --query '%s'" % (das_options, query) for query in self.queries()[:3] ])
118  command = "({0})".format(command)
119  else:
120  command = "das_client.py %s --query '%s'" % (das_options, self.queries()[0])
121 
122  # Run filter on DAS output
123  if self.ib_blacklist:
124  command += " | grep -E -v "
125  command += " ".join(["-e '{0}'".format(pattern) for pattern in self.ib_blacklist])
126  command += " | sort -u"
127  return command
128 
129  def lumiRanges(self):
130  if len(self.run) != 0:
131  return "echo '{\n"+",".join(('"%d":[[1,268435455]]\n'%(x,) for x in self.run))+"}'"
132  if self.ls :
133  return "echo '{\n"+",".join(('"%d" : %s\n'%( int(x),self.ls[x]) for x in self.ls.keys()))+"}'"
134  return None
135 
136  def queries(self):
137  query_by = "block" if self.ib_block else "dataset"
138  query_source = "{0}#{1}".format(self.dataSet, self.ib_block) if self.ib_block else self.dataSet
139 
140  if self.ls :
141  the_queries = []
142  #for query_run in self.ls.keys():
143  # print "run is %s"%(query_run)
144  # if you have a LS list specified, still query das for the full run (multiple ls queries take forever)
145  # and use step1_lumiRanges.log to run only on LS which respect your selection
146 
147  # DO WE WANT T2_CERN ?
148  return ["file {0}={1} run={2}".format(query_by, query_source, query_run) for query_run in self.ls.keys()]
149  #return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run) for query_run in self.ls.keys()]
150 
151 
152  #
153  #for a_range in self.ls[query_run]:
154  # # print "a_range is %s"%(a_range)
155  # the_queries += ["file {0}={1} run={2} lumi={3} ".format(query_by, query_source, query_run, query_ls) for query_ls in expandLsInterval(a_range) ]
156  #print the_queries
157  return the_queries
158 
159  if len(self.run) is not 0:
160  return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run) for query_run in self.run]
161  else:
162  return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]
163 
164  def __str__(self):
165  if self.ib_block:
166  return "input from: {0} with run {1}#{2}".format(self.dataSet, self.ib_block, self.run)
167  return "input from: {0} with run {1}".format(self.dataSet, self.run)
168 
169 
# merge dictionaries, with priority on the [0] index
def merge(dictlist, TELL=False):
    """Recursively merge a list of dicts; earlier entries win on key clashes.

    Returns a new dict; the inputs themselves are not modified (shallow
    copies are taken). TELL enables debug printing.
    """
    import copy
    last = len(dictlist) - 1
    if TELL:
        # py2-only print statements replaced with the function form
        print(last, dictlist)
    if last == 0:
        # ONLY ONE ITEM LEFT
        return copy.copy(dictlist[0])
    else:
        reducedlist = dictlist[0:max(0, last - 1)]
        if TELL:
            print(reducedlist)
        # make a copy of the last item
        d = copy.copy(dictlist[last])
        # update with the last but one item
        d.update(dictlist[last - 1])
        # and recursively do the rest
        reducedlist.append(d)
        return merge(reducedlist, TELL)
188 
def remove(d, key, TELL=False):
    """Return a deep copy of dict d with key deleted; d itself is untouched.

    Raises KeyError if key is absent. TELL enables debug printing.
    """
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print("original dict, BEF: %s" % d)
    del e[key]
    if TELL:
        print("copy-removed dict, AFT: %s" % e)
    return e
196 
197 
198 #### Standard release validation samples ####
199 
200 stCond={'--conditions':'auto:run1_mc'}
def Kby(N, s):
    """Shorthand for a --relval spec: N thousand events, s events per job."""
    return {'--relval': '{0}000,{1}'.format(N, s)}
def Mby(N, s):
    """Shorthand for a --relval spec: N million events, s events per job."""
    return {'--relval': '{0}000000,{1}'.format(N, s)}
205 
def changeRefRelease(steps, listOfPairs):
    """Rewrite release names in each step's INPUT dataset and --pileup_input.

    listOfPairs is a sequence of (oldRef, newRef) substrings; every match
    is replaced in place.
    """
    for name in steps:
        step = steps[name]
        if 'INPUT' in step:
            # Capture the dataset name once; each pair replaces against it.
            dataSetName = step['INPUT'].dataSet
            for (oldRef, newRef) in listOfPairs:
                if oldRef in dataSetName:
                    step['INPUT'].dataSet = dataSetName.replace(oldRef, newRef)
        if '--pileup_input' in step:
            # Pileup replacements apply cumulatively, re-reading each time.
            for (oldRef, newRef) in listOfPairs:
                if oldRef in step['--pileup_input']:
                    step['--pileup_input'] = step['--pileup_input'].replace(oldRef, newRef)
217 
def addForAll(steps, d):
    """Merge the entries of d into every step dictionary, in place."""
    for stepName in steps:
        steps[stepName].update(d)
221 
222 
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    """Build a generator-validation step from template d without mutating it.

    suffix renames the genvalid sequence; fi selects an LHE article number;
    dataSet selects a DAS dataset (and takes precedence over fi).
    """
    import copy
    cfg = copy.copy(d)
    if suffix:
        cfg['-s'] = cfg['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        cfg['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        # A dataset selection overrides any lhe --filein set just above.
        cfg['--filein'] = 'das:%s' % (dataSet,)
    cfg['cfg'] = fragment
    return cfg
234 
235 
Definition: merge.py:1
def selectedLS
Definition: MatrixUtil.py:58
def expandLsInterval
Definition: MatrixUtil.py:46
def changeRefRelease
Definition: MatrixUtil.py:206
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
list object
Definition: dbtoconf.py:77
def genvalid
Definition: MatrixUtil.py:223
#define update(a, b)
def addForAll
Definition: MatrixUtil.py:218
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run