dqmiodatasetharvest Namespace Reference

Functions

def dasquery(dataset)
def harvestfile(fname)
def tosqlite(x)

Function Documentation

def dqmiodatasetharvest.dasquery(dataset)

Definition at line 126 of file dqmiodatasetharvest.py.


Referenced by harvestfile().

126 def dasquery(dataset):
127     if not dataset.endswith("DQMIO"):
128         raise Exception("This tool probably cannot read the dataset you specified. The name should end with DQMIO.")
129     dasquery = ["dasgoclient", "-query=file dataset=%s" % dataset]
130     print("Querying das ... %s" % dasquery)
131     files = subprocess.check_output(dasquery)
132     files = files.splitlines()
133     print("Got %d files." % len(files))
134     return files
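The returned list comes straight from dasgoclient, one logical file name per line; since subprocess.check_output returns bytes under Python 3, the entries are byte strings there. A minimal usage sketch (the dataset name is purely illustrative and not taken from the script):

files = dasquery("/ZeroBias/Run2018D-12Nov2019_UL2018-v4/DQMIO")
for lfn in files:
    # decode in case dasgoclient output arrived as bytes (runtime assumption)
    print(lfn.decode() if isinstance(lfn, bytes) else lfn)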
def dqmiodatasetharvest.harvestfile(fname)

Definition at line 179 of file dqmiodatasetharvest.py.

References dasquery() and tosqlite().

179 def harvestfile(fname):
180     f = ROOT.TFile.Open(ROOTPREFIX + fname)
181     idxtree = getattr(f, "Indices")
182     #idxtree.GetEntry._threaded = True # now the blocking call should release the GIL...
183 
184     # we have no good way to find out which lumis were processed in a job,
185     # so we watch the per-lumi indices and assume that all mentioned lumis
186     # are covered in the end-of-job MEs. This might fail if there are no
187     # per-lumi MEs.
188     knownlumis = set()
189     mes_to_store = []
190 
191     for i in range(idxtree.GetEntries()):
192         idxtree.GetEntry(i)
193         run, lumi, metype = idxtree.Run, idxtree.Lumi, idxtree.Type
194         if lumi != 0:
195             knownlumis.add(lumi)
196 
197         if not treenames[metype] in interesting_types:
198             continue
199 
200 
201         endrun = run  # assume no multi-run files for now
202         if lumi == 0:  # per-job ME
203             endlumi = max(knownlumis)
204             lumi = min(knownlumis)
205         else:
206             endlumi = lumi
207 
208         # inclusive range -- for 0 entries, row is left out
209         firstidx, lastidx = idxtree.FirstIndex, idxtree.LastIndex
210         metree = getattr(f, treenames[metype])
211         metree.SetBranchStatus("*", 0)
212         metree.SetBranchStatus("FullName", 1)
213 
214         for x in range(firstidx, lastidx+1):
215             metree.GetEntry(x)
216             mename = str(metree.FullName)
217             if mename in interesting_mes:
218                 metree.GetEntry(x, 1)
219                 value = metree.Value
220 
221                 mes_to_store.append((
222                     mename,
223                     run, lumi, endrun, endlumi,
224                     metype,
225                     tosqlite(value),
226                 ))
227 
228     return mes_to_store
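harvestfile() relies on module-level configuration defined elsewhere in dqmiodatasetharvest.py (ROOTPREFIX, treenames, interesting_types and interesting_mes) and returns one tuple per selected monitor element: (fullname, run, lumi, endrun, endlumi, metype, value). A sketch of how the file list from dasquery() could be fed through it, using a multiprocessing pool purely as an illustration, not necessarily how the script itself drives the harvesting:

from multiprocessing import Pool

def harvest_dataset(dataset, nworkers=4):
    # dasquery() may return bytes under Python 3; normalise to str for ROOT.TFile.Open
    files = [f.decode() if isinstance(f, bytes) else f for f in dasquery(dataset)]
    rows = []
    with Pool(nworkers) as pool:
        # each worker opens one DQMIO file and returns its list of ME tuples
        for mes in pool.imap_unordered(harvestfile, files):
            rows.extend(mes)
    return rows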
def dqmiodatasetharvest.tosqlite(x)

Definition at line 103 of file dqmiodatasetharvest.py.


Referenced by harvestfile().

103 def tosqlite(x):
104     if isinstance(x, ROOT.string):
105         try:
106             return unicode(x.data())
107         except:
108             return buffer(x.data())
109     if isinstance(x, int):
110         return x
111     if isinstance(x, float):
112         return x
113     if isinstance(x, long):
114         return x
115     else:
116         try:
117             rootobj = unicode(ROOT.TBufferJSON.ConvertToJSON(x))
118             # turns out ROOT does not generate valid JSON for NaN/inf
119             clean = nan.sub('\\g<1>0\\g<2>', inf.sub('\\g<1>1e38\\g<2>', rootobj))
120             obj = json.loads(clean)
121             jsonobj = json.dumps(obj, allow_nan=False)
122             return jsonobj
123         except Exception as e:
124             return json.dumps({"root2sqlite_error": e.__repr__(), "root2sqlite_object": x.__repr__()})
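Despite the name, tosqlite() only converts a single value into something the sqlite3 module can bind: plain numbers pass through, ROOT strings become text (or a raw buffer if they cannot be decoded), and anything else is serialised to a JSON string via ROOT.TBufferJSON, with the module-level nan/inf regular expressions (not shown on this page) patching up ROOT's non-standard JSON for NaN and infinity. The use of the unicode/long/buffer builtins suggests this branch was written for Python 2. A hypothetical storage sketch, with the table name and schema invented for illustration only:

import sqlite3

def store(dbname, mes_to_store):
    # mes_to_store is the output of harvestfile(); every value has already been
    # reduced by tosqlite() to an SQLite-bindable type (number, text, blob or JSON text)
    conn = sqlite3.connect(dbname)
    conn.execute(
        "CREATE TABLE IF NOT EXISTS monitorelements ("
        "name TEXT, run INTEGER, lumi INTEGER, "
        "endrun INTEGER, endlumi INTEGER, metype INTEGER, value)")
    conn.executemany(
        "INSERT INTO monitorelements VALUES (?, ?, ?, ?, ?, ?, ?)", mes_to_store)
    conn.commit()
    conn.close()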