CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
MatrixUtil.py
Go to the documentation of this file.
class Matrix(dict):
    """Mapping of workflow number (float) -> WF, refusing to overwrite entries."""

    def __setitem__(self, key, value):
        if key in self:
            # A duplicate workflow number is a configuration error: warn and
            # keep the existing entry (fixed Py2 prints + "overwritting" typo).
            print("ERROR in Matrix")
            print("overwriting", key, "not allowed")
        else:
            # Normalize the key to float and wrap the step-name list in a WF.
            self.update({float(key): WF(float(key), value)})

    def addOverride(self, key, override):
        # Forward the override dict to the stored WF for this workflow number.
        self[key].addOverride(override)
11 
#the class to collect all possible steps
class Steps(dict):
    """Mapping of step name -> option dict; duplicate step names abort the job."""

    def __setitem__(self, key, value):
        if key in self:
            # A duplicate step name is fatal: report and exit immediately
            # (fixed Py2 prints + "overwritting" typo; exit code kept as -9).
            print("ERROR in Step")
            print("overwriting", key, "not allowed")
            import sys
            sys.exit(-9)
        else:
            self.update({key: value})
            # make the python file named <step>.py
            #if not '--python' in value: self[key].update({'--python':'%s.py'%(key,)})

    def overwrite(self, keypair):
        # Alias keypair[0] to the value already stored under keypair[1].
        value = self[keypair[1]]
        self.update({keypair[0]: value})
28 
class WF(list):
    """A workflow: a list of step names, plus its numeric id and overrides."""

    def __init__(self, n, l):
        self.extend(l)
        self.num = n
        # the actual step option dicts of this WF, filled by interpret()
        self.steps = []
        self.overrides = {}

    def addOverride(self, overrides):
        # Replaces (does not merge) any previously set overrides.
        self.overrides = overrides

    def interpret(self, stepsDict):
        # Resolve each step name into its option dict.
        for s in self:
            print('steps', s, stepsDict[s])
            # BUG FIX: original appended to an undefined global 'steps'
            # (NameError at runtime); the instance attribute was intended.
            self.steps.append(stepsDict[s])
43 
44 
45 
def expandLsInterval(lumis):
    """Expand an inclusive [first, last] lumi-section pair into the full list.

    Materialize the range so callers still get a list under Python 3
    (Python 2 `range` returned a list).
    """
    return list(range(lumis[0], lumis[1] + 1))
48 
# Golden JSON of certified 2015 collision lumi sections (25 ns + 50 ns runs).
# NOTE(review): findFileInPath is not defined in this view — presumably
# provided by an import earlier in the file; confirm before refactoring.
jsonFile2015 = findFileInPath("DPGAnalysis/Skims/data/Cert_13TeV_16Dec2015ReReco_Collisions15_25ns_50ns_JSON.txt")

# Load the golden JSON once at import time; used by selectedLS() below.
import json
with open(jsonFile2015) as data_file:
    data_json2015 = json.load(data_file)
55 
# return a portion of the 2015 golden json
# LS for a full run by default; otherwise a subset of which you determined the size
def selectedLS(list_runs=[], maxNum=-1, l_json=None):
    """Return {run: [[firstLS, lastLS], ...]} for the selected runs.

    list_runs -- list of integer run numbers to select
    maxNum    -- stop collecting once this many lumi sections were counted
                 (cumulative across runs); -1 means no limit
    l_json    -- golden-json dict to select from; defaults to the module-level
                 data_json2015 (late-bound, so it is only required when used)

    Returns None when the input is invalid or nothing passed the selection.
    """
    if l_json is None:
        l_json = data_json2015
    # Guard against an empty list (original crashed with IndexError) and
    # non-integer run numbers (fixed "intergers" typo in the message).
    if not list_runs or not isinstance(list_runs[0], int):
        print("ERROR: list_runs must be a non-empty list of integers")
        return None
    local_dict = {}
    ls_count = 0

    for run in list_runs:
        if str(run) in l_json:
            for LSsegment in l_json[str(run)]:
                ls_count += (LSsegment[-1] - LSsegment[0] + 1)
                # Stop this run's collection once the LS budget is exceeded
                # (logical 'and' replaces the original bitwise '&').
                if maxNum != -1 and ls_count > maxNum:
                    break
                local_dict.setdefault(run, []).append(LSsegment)
        else:
            print("run %s is NOT present in json %s\n\n" % (run, l_json))

    if local_dict:
        return local_dict
    print("No luminosity section interval passed the json and your selection; returning None")
    return None
92 
93 # print "\n\n\n THIS IS WHAT I RETURN: %s \n\n"%( selectedLS([251244,251251]) )
94 
95 
96 
97 
InputInfoNDefault=2000000  # default number of events requested from an input dataset
class InputInfo(object):
    """Description of an input dataset for a relval step: DAS queries,
    run/lumi selection, and file/event bookkeeping."""

    def __init__(self, dataSet, label='', run=None, ls=None, files=1000,
                 events=InputInfoNDefault, split=10, location='CAF',
                 ib_blacklist=None, ib_block=None):
        # Avoid the shared mutable defaults ([] / {}) of the original.
        self.run = run if run is not None else []
        self.ls = ls if ls is not None else {}
        self.files = files
        self.events = events
        self.location = location
        self.label = label
        self.dataSet = dataSet
        self.split = split
        self.ib_blacklist = ib_blacklist
        self.ib_block = ib_block

    def das(self, das_options):
        """Build the shell command line that queries DAS for the input files."""
        # Fixed 'len(...) is not 0' identity comparison -> '!= 0'.
        if len(self.run) != 0 or self.ls:
            # take at most 3 queries, to avoid sinking das
            # (do this if you have run or LS queries)
            command = ";".join("das_client.py %s --query '%s'" % (das_options, query)
                               for query in self.queries()[:3])
            command = "({0})".format(command)
        else:
            command = "das_client.py %s --query '%s'" % (das_options, self.queries()[0])

        # Run blacklist filter on DAS output, then deduplicate.
        if self.ib_blacklist:
            command += " | grep -E -v "
            command += " ".join("-e '{0}'".format(pattern) for pattern in self.ib_blacklist)
        command += " | sort -u"
        return command

    def lumiRanges(self):
        """Return a shell snippet echoing the lumi-range JSON, or None."""
        if len(self.run) != 0:
            # Whole runs selected: accept the full LS range for each run.
            return "echo '{\n" + ",".join('"%d":[[1,268435455]]\n' % (x,) for x in self.run) + "}'"
        if self.ls:
            return "echo '{\n" + ",".join('"%d" : %s\n' % (int(x), self.ls[x]) for x in self.ls.keys()) + "}'"
        return None

    def queries(self):
        """Return the list of DAS query strings for this input."""
        query_by = "block" if self.ib_block else "dataset"
        query_source = "{0}#{1}".format(self.dataSet, self.ib_block) if self.ib_block else self.dataSet

        if self.ls:
            # With an LS selection, still query das for the full runs
            # (multiple per-LS queries take forever) and rely on
            # step1_lumiRanges.log to run only on the selected LS.
            # (Removed the dead code after this return.)
            return ["file {0}={1} run={2}".format(query_by, query_source, query_run)
                    for query_run in self.ls.keys()]

        if len(self.run) != 0:
            # DO WE WANT T2_CERN ?
            return ["file {0}={1} run={2} site=T2_CH_CERN".format(query_by, query_source, query_run)
                    for query_run in self.run]
        return ["file {0}={1} site=T2_CH_CERN".format(query_by, query_source)]

    def __str__(self):
        if self.ib_block:
            return "input from: {0} with run {1}#{2}".format(self.dataSet, self.ib_block, self.run)
        return "input from: {0} with run {1}".format(self.dataSet, self.run)
170 
171 
# merge dictionaries, with priority on the [0] index
def merge(dictlist, TELL=False):
    """Recursively merge a list of dicts; earlier entries win on key clashes.

    dictlist -- non-empty list of dicts, highest priority first
    TELL     -- print intermediate state for debugging

    Returns a new dict; the inputs are not modified.
    """
    import copy
    last = len(dictlist) - 1
    if TELL:
        print(last, dictlist)
    if last == 0:
        # ONLY ONE ITEM LEFT
        return copy.copy(dictlist[0])
    reducedlist = dictlist[0:max(0, last - 1)]
    if TELL:
        print(reducedlist)
    # Merge the last two items (the earlier one wins) and recurse on the rest.
    d = copy.copy(dictlist[last])
    d.update(dictlist[last - 1])
    reducedlist.append(d)
    return merge(reducedlist, TELL)
190 
def remove(d, key, TELL=False):
    """Return a deep copy of dict d with key removed; d itself is untouched.

    Raises KeyError if key is not present (same as the original).
    """
    import copy
    e = copy.deepcopy(d)
    if TELL:
        print("original dict, BEF: %s" % d)
    del e[key]
    if TELL:
        print("copy-removed dict, AFT: %s" % e)
    return e
198 
199 
#### Standard release validation samples ####

# Shorthand for the standard Run1 MC conditions command-line option.
stCond={'--conditions':'auto:run1_mc'}
def Kby(N, s):
    """Build a --relval option dict for N thousand events, s events per job."""
    return {'--relval': str(N) + '000,' + str(s)}
def Mby(N, s):
    """Build a --relval option dict for N million events, s events per job."""
    return {'--relval': str(N) + '000000,' + str(s)}
207 
def changeRefRelease(steps, listOfPairs):
    """Rewrite dataset names in every step, substituting each (old, new)
    release-string pair in the INPUT dataset and the pileup input option.
    Mutates `steps` in place."""
    for name in steps:
        step = steps[name]
        if 'INPUT' in step:
            # Capture the dataset once: each matching pair replaces in the
            # ORIGINAL name (matches the original function's behaviour).
            dataset = step['INPUT'].dataSet
            for old, new in listOfPairs:
                if old in dataset:
                    step['INPUT'].dataSet = dataset.replace(old, new)
        if '--pileup_input' in step:
            for old, new in listOfPairs:
                if old in step['--pileup_input']:
                    step['--pileup_input'] = step['--pileup_input'].replace(old, new)
219 
def addForAll(steps, d):
    """Merge the option dict d into every step (in place)."""
    for name in steps:
        steps[name].update(d)
223 
224 
def genvalid(fragment, d, suffix='all', fi='', dataSet=''):
    """Clone the option dict d for a generator-validation workflow.

    fragment -- cfg fragment name, stored under 'cfg'
    suffix   -- appended to the 'genvalid' token in the '-s' option when set
    fi       -- LHE article number for '--filein' (takes effect when truthy)
    dataSet  -- DAS dataset for '--filein'; overrides fi when both are given
    """
    from copy import copy
    out = copy(d)
    if suffix:
        out['-s'] = out['-s'].replace('genvalid', 'genvalid_%s' % suffix)
    if fi:
        out['--filein'] = 'lhe:%d' % (fi,)
    if dataSet:
        out['--filein'] = 'das:%s' % (dataSet,)
    out['cfg'] = fragment
    return out
236 
237 
Definition: merge.py:1
def selectedLS
Definition: MatrixUtil.py:58
boost::dynamic_bitset append(const boost::dynamic_bitset<> &bs1, const boost::dynamic_bitset<> &bs2)
this method takes two bitsets bs1 and bs2 and returns result of bs2 appended to the end of bs1 ...
def expandLsInterval
Definition: MatrixUtil.py:46
def changeRefRelease
Definition: MatrixUtil.py:208
static std::string join(char **cmd)
Definition: RemoteFile.cc:18
def genvalid
Definition: MatrixUtil.py:225
#define update(a, b)
def addForAll
Definition: MatrixUtil.py:220
How EventSelector::AcceptEvent() decides whether to accept an event for output otherwise it is excluding the probing of A single or multiple positive and the trigger will pass if any such matching triggers are PASS or EXCEPTION[A criterion thatmatches no triggers at all is detected and causes a throw.] A single negative with an expectation of appropriate bit checking in the decision and the trigger will pass if any such matching triggers are FAIL or EXCEPTION A wildcarded negative criterion that matches more than one trigger in the trigger list("!*","!HLTx*"if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL.It will reject the event if any of the triggers are PASS or EXCEPTION(this matches the behavior of"!*"before the partial wildcard feature was incorporated).Triggers which are in the READY state are completely ignored.(READY should never be returned since the trigger paths have been run