test
CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
MatrixUtil.py
Go to the documentation of this file.
class Matrix(dict):
    """Mapping of workflow number -> WF object; duplicate keys are refused."""

    def __setitem__(self, key, value):
        # Each workflow number may be registered only once; complain and
        # keep the existing entry instead of silently replacing it.
        if key in self:
            print("ERROR in Matrix")
            print("overwriting", key, "not allowed")
        else:
            # Normalize the key to float and wrap the value in a WF.
            self.update({float(key): WF(float(key), value)})

    def addOverride(self, key, override):
        # Delegate to the WF stored under this workflow number.
        self[key].addOverride(override)
11 
#the class to collect all possible steps
class Steps(dict):
    """Dictionary of step name -> step configuration; redefining a step aborts."""

    def __setitem__(self, key, value):
        if key in self:
            # A step name must be unique across the matrix; redefining one
            # is a fatal configuration error.
            print("ERROR in Step")
            print("overwriting", key, "not allowed")
            import sys
            sys.exit(-9)
        else:
            self.update({key: value})
            # make the python file named <step>.py
            #if not '--python' in value: self[key].update({'--python':'%s.py'%(key,)})

    def overwrite(self, keypair):
        # Register keypair[0] as an alias for the configuration stored
        # under keypair[1] (bypasses the duplicate check via update()).
        value = self[keypair[1]]
        self.update({keypair[0]: value})
28 
class WF(list):
    """One workflow: the list of its step names plus bookkeeping attributes."""

    def __init__(self, n, l):
        self.extend(l)
        # Workflow number.
        self.num = n
        #the actual steps of this WF
        self.steps = []
        self.overrides = {}

    def addOverride(self, overrides):
        self.overrides = overrides

    def interpret(self, stepsDict):
        # Resolve each step name of this workflow against the step catalogue.
        for s in self:
            print('steps', s, stepsDict[s])
            # BUG FIX: the original appended to an undefined global 'steps'
            # (NameError at runtime); the resolved configurations belong on
            # this workflow instance.
            self.steps.append(stepsDict[s])
43 
44 
45 
def expandLsInterval(lumis):
    """Expand an inclusive [first, last] lumi-section pair into a range."""
    first, last = lumis[0], lumis[1]
    return range(first, last + 1)
48 
# Locate the 2015 golden-run certification JSON shipped with the release.
# NOTE(review): findFileInPath is not visible in this chunk — presumably
# defined earlier in the file or imported; verify before refactoring.
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_246908-XXXXXX_13TeV_PromptReco_Collisions15_JSON.txt")

import json
# Parse the certification JSON once at import time; selectedLS() below uses
# it as its default run/lumi-section mask.
with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)
55 
# return a portion of the 2015 golden json
# LS for a full run by default; otherwise a subset of which you determined the size
def selectedLS(list_runs=[], maxNum=-1, l_json=None):
    """Return {run: [[ls_first, ls_last], ...]} for the requested runs.

    list_runs -- list of integer run numbers to look up
    maxNum    -- stop adding segments once the accumulated LS count exceeds
                 this value (-1 means no cap)
    l_json    -- certification dict keyed by str(run); defaults to the
                 module-level data_json2015 (resolved lazily so the lookup
                 tracks the current module state)
    Returns None when no run passes the json and the selection.
    """
    # print("maxNum is %s"%(maxNum))
    if l_json is None:
        # Late binding of the module-level default (the original bound it at
        # function-definition time).
        l_json = data_json2015
    # Guard against an empty list (the original raised IndexError) and
    # non-integer run numbers.
    if not list_runs or type(list_runs[0]) != int:
        print("ERROR: list_runs must be a list of integers")
        return None
    local_dict = {}
    ls_count = 0

    for run in list_runs:
        if str(run) in l_json.keys():
            # print("run %s is there"%(run))
            runNumber = run
            # print("Doing lumi-section selection for run %s: "%(run))
            for LSsegment in l_json[str(run)]:
                # print(LSsegment)
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                # 'and' instead of the original bitwise '&' — both operands
                # are booleans, so the result is identical but the intent is
                # explicit and precedence-safe.
                if (ls_count > maxNum) and (maxNum != -1):
                    break
                # return local_dict
                if runNumber in local_dict.keys():
                    local_dict[runNumber].append(LSsegment)
                else:
                    local_dict[runNumber] = [LSsegment]
                # print("total LS so far %s - grow %s"%(ls_count,local_dict))
                #local_dict[runNumber] = [1,2,3]
        else:
            print("run %s is NOT present in json %s\n\n" % (run, l_json))
        # print("++ %s"%(local_dict))

    if len(local_dict.keys()) > 0:
        return local_dict
    else:
        print("No luminosity section interval passed the json and your selection; returning None")
        return None
92 
93 # print "\n\n\n THIS IS WHAT I RETURN: %s \n\n"%( selectedLS([251244,251251]) )
94 
95 
96 
97 
# Default number of events for an input dataset when not given explicitly.
InputInfoNDefault=2000000
100  def __init__(self,dataSet,label='',run=[],ls={},files=1000,events=InputInfoNDefault,split=10,location='CAF',ib_blacklist=None,ib_block=None) :
101  self.run = run
102  self.ls = ls
103  self.files = files
104  self.events = events
105  self.location = location
106  self.label = label
107  self.dataSet = dataSet
108  self.split = split
109  self.ib_blacklist = ib_blacklist
110  self.ib_block = ib_block
111 
112  def das(self, das_options):
113  if len(self.run) is not 0 or self.ls:
114  # take at most 5 queries, to avoid sinking das
115 
116  # do if you have LS queries
117  # command = ";".join(["das_client.py %s --query '%s'" % (das_options, query) for query in self.queries()[:3] ])
118  command = ";".join(["das_client %s --query '%s'" % (das_options, query) for query in self.queries()[:3] ])
119  command = "({0})".format(command)
120  else:
121  command = "das_client %s --query '%s'" % (das_options, self.queries()[0])
122 
123  # Run filter on DAS output
124  if self.ib_blacklist:
125  command += " | grep -E -v "
126  command += " ".join(["-e '{0}'".format(pattern) for pattern in self.ib_blacklist])
127  command += " | sort -u"
128  return command
129 
130  def lumiRanges(self):
131  if len(self.run) != 0:
132  return "echo '{\n"+",".join(('"%d":[[1,268435455]]\n'%(x,) for x in self.run))+"}'"
133  if self.ls :
134  return "echo '{\n"+",".join(('"%d" : %s\n'%( int(x),self.ls[x]) for x in self.ls.keys()))+"}'"
135  return None
136 
137  def queries(self):
138  query_by = "block" if self.ib_block else "dataset"
139  query_source = "{0}#{1}".format(self.dataSet, self.ib_block) if self.ib_block else self.dataSet
140 
141  if self.ls :
142  the_queries = []
143  #for query_run in self.ls.keys():
144  # print "run is %s"%(query_run)
145  # if you have a LS list specified, still query das for the full run (multiple ls queries take forever)
146  # and use step1_lumiRanges.log to run only on LS which respect your selection
147 
148  # DO WE WANT T2_CERN ?
149  return ["file {0}={1} run={2}".format(query_by, query_source, query_run) for query_run in self.ls.keys()]
150  #return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run) for query_run in self.ls.keys()]
151 
152 
153  #
154  #for a_range in self.ls[query_run]:
155  # # print "a_range is %s"%(a_range)
156  # the_queries += ["file {0}={1} run={2} lumi={3} ".format(query_by, query_source, query_run, query_ls) for query_ls in expandLsInterval(a_range) ]
157  #print the_queries
158  return the_queries
159 
160  if len(self.run) is not 0:
161  return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run) for query_run in self.run]
162  else:
163  return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]
164 
165  def __str__(self):
166  if self.ib_block:
167  return "input from: {0} with run {1}#{2}".format(self.dataSet, self.ib_block, self.run)
168  return "input from: {0} with run {1}".format(self.dataSet, self.run)
169 
170 
# merge dictionaries, with priority on the [0] index
def merge(dictlist, TELL=False):
    """Recursively merge a list of dicts; earlier entries win on key clashes.

    dictlist -- list of dicts, highest priority first
    TELL     -- verbose debugging output
    Returns a new dict; the input dicts are not modified.
    """
    import copy
    last = len(dictlist) - 1
    if TELL:
        print(last, dictlist)
    if last == 0:
        # ONLY ONE ITEM LEFT
        return copy.copy(dictlist[0])
    else:
        reducedlist = dictlist[0:max(0, last - 1)]
        if TELL:
            print(reducedlist)
        # make a copy of the last item
        d = copy.copy(dictlist[last])
        # update with the last but one item
        d.update(dictlist[last - 1])
        # and recursively do the rest
        reducedlist.append(d)
        return merge(reducedlist, TELL)
189 
def remove(d, key, TELL=False):
    """Return a deep copy of dict d with 'key' deleted; d itself is untouched.

    Raises KeyError if key is absent (same as the original behavior).
    """
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print("original dict, BEF: %s" % d)
    del e[key]
    if TELL:
        print("copy-removed dict, AFT: %s" % e)
    return e
197 
198 
#### Standard release validation samples ####

# Conditions option shared by the run-1 Monte-Carlo workflows below.
stCond={'--conditions':'auto:run1_mc'}
def Kby(N, s):
    """Build a --relval spec for N thousand events, s events per job."""
    relval_spec = '%s000,%s' % (N, s)
    return {'--relval': relval_spec}
def Mby(N, s):
    """Build a --relval spec for N million events, s events per job."""
    relval_spec = '%s000000,%s' % (N, s)
    return {'--relval': relval_spec}
206 
def changeRefRelease(steps, listOfPairs):
    """Apply (ref, newRef) substring replacements to each step's input
    dataset name and --pileup_input path, in place."""
    for name in steps:
        step = steps[name]
        if 'INPUT' in step:
            # Replacements are applied against the dataset name as it was on
            # entry; when several pairs match, the last one wins.
            original_name = step['INPUT'].dataSet
            for ref, newRef in listOfPairs:
                if ref in original_name:
                    step['INPUT'].dataSet = original_name.replace(ref, newRef)
        if '--pileup_input' in step:
            for ref, newRef in listOfPairs:
                if ref in step['--pileup_input']:
                    step['--pileup_input'] = step['--pileup_input'].replace(ref, newRef)
218 
def addForAll(steps, d):
    """Merge the entries of d into every step configuration, in place."""
    for step_config in steps.values():
        step_config.update(d)
222 
223 
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    """Derive a generator-validation step config for the given fragment.

    suffix  -- when set, appended to the 'genvalid' token of the -s option
    fi      -- when set, an LHE article number used as --filein
    dataSet -- when set, a DAS dataset used as --filein (overrides fi)
    Returns a shallow copy of d; d itself is left untouched.
    """
    import copy
    conf = copy.copy(d)
    if suffix:
        conf['-s'] = conf['-s'].replace('genvalid', 'genvalid_' + suffix)
    if fi:
        conf['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        conf['--filein'] = 'das:%s' % (dataSet,)
    conf['cfg'] = fragment
    return conf
235 
236 
Definition: merge.py:1
def selectedLS
Definition: MatrixUtil.py:58
def expandLsInterval
Definition: MatrixUtil.py:46
def changeRefRelease
Definition: MatrixUtil.py:207
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
list object
Definition: dbtoconf.py:77
def genvalid
Definition: MatrixUtil.py:224
#define update(a, b)
def addForAll
Definition: MatrixUtil.py:219
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run