dqm_interfaces.py
from __future__ import print_function
################################################################################
# RelMon: a tool for automatic Release Comparison
# https://twiki.cern.ch/twiki/bin/view/CMSPublic/RelMon
#
#
#
# Danilo Piparo CERN - danilo.piparo@cern.ch
#
################################################################################

from copy import deepcopy
from os import chdir,getcwd,makedirs
from os.path import abspath,exists,join,basename
from re import sub,search
from re import compile as recompile
from sys import exit,stderr,version_info
from threading import Thread,activeCount
from time import sleep
from urllib2 import Request,build_opener,urlopen

import sys
argv=sys.argv
from ROOT import *
import ROOT
sys.argv=argv

gROOT.SetBatch(True)

from authentication import X509CertOpen
from dirstructure import Comparison,Directory,tcanvas_print_processes
from utils import Chi2,KS,BinToBin,Statistical_Tests,literal2root

#-------------------------------------------------------------------------------
class Error(Exception):  # class statement reconstructed; the name is assumed
    """Base class for exceptions in this module."""
    pass


class ServerError(Error):  # class statement reconstructed; the name is assumed
    """Exception occurs in case of problems of communication with the server.
    """
    def __init__(self,msg):
        self.msg = msg


class InvalidNumberOfArguments(Error):
    def __init__(self,msg):
        self.msg = msg

#-----------------------------------------------------------------------------
class DQMcommunicator(object):
    """Communicate with the DQM Document server"""

    #-----------------------------------------------------------------------------

    base_dir='/data/json/archive/'

    def __init__(self,
                 server,
                 is_private=False,
                 ident="DQMToJson/1.0 python/%d.%d.%d" % version_info[:3]):
        self.ident = ident
        self.server = server
        self.is_private = is_private
        self.DQMpwd=DQMcommunicator.base_dir
        self.oldDQMpwd=self.DQMpwd
        self.opener=None
        if not self.is_private:
            self.opener=build_opener(X509CertOpen())

    #-----------------------------------------------------------------------------

    def open_url(self,url):
        url=url.replace(' ','%20')
        datareq = Request(url)
        datareq.add_header('User-agent', self.ident)
        url_obj=0
        if not self.is_private:
            url_obj=self.opener.open(datareq)
            #url_obj=build_opener(X509CertOpen()).open(datareq)
        else:
            url_obj=urlopen(datareq)

        return url_obj

    #-----------------------------------------------------------------------------

    def get_data(self, full_url):
        #print "getting data from %s" %full_url
        data = self.open_url(full_url).read()

        data = sub("-inf", '0', data)
        data = sub(r"\s+inf", '0', data)
        data = sub(r"\s+nan", '0', data)
        data = sub('""(CMSSW.*?)""', '"\\1"', data)

        return data

    #-----------------------------------------------------------------------------
    def ls_url(self, url):
        url=url.replace(" ","%20")
        url=self.server+url
        #print "listing "+url
        form_folder={}
        raw_folder=None
        try:
            raw_folder=eval(self.get_data(url))
        except:
            print("Retrying..")
            for ntrials in xrange(5):
                try:
                    if ntrials!=0:
                        sleep(2)
                    #raw_folder=loads(self.get_data(url))
                    raw_folder=eval(self.get_data(url))
                    break
                except:
                    print("Could not fetch %s. Retrying" %url)

        #raw_folder=loads(self.get_data(url))
        for content_dict in raw_folder["contents"]:
            if "subdir" in content_dict:
                form_folder[content_dict["subdir"]]={"type":'dir'}
            elif "obj" in content_dict:
                properties=content_dict["properties"]
                obj_name=content_dict["obj"]
                obj_type=properties["type"]
                obj_kind=properties["kind"]
                obj_as_string=''
                if "rootobj" in content_dict:
                    obj_as_string=content_dict["rootobj"]
                form_folder[obj_name]={'type':obj_type,'obj_as_string':obj_as_string,"kind":obj_kind}
        #for k,v in form_folder.items():
        #  print "* %s --> %s" %(k,v["type"])

        return form_folder

    #-----------------------------------------------------------------------------

    def ls(self, url='', fetch_root=False):
        if len(url)==0:
            url=join(self.DQMpwd,url)

        form_folder={}

        if fetch_root:
            url='%s?rootcontent=1'%url
        form_folder=self.ls_url(url)

        return form_folder

    #-----------------------------------------------------------------------------
    def cd(self, *args):
        len_args=len(args)
        full_url=""
        if len_args!=1 and len_args!=3:
            raise InvalidNumberOfArguments
        if len_args==3:
            dataset, run, folder = args
            full_url='%s/data/json/archive/%s/%s/%s' % (self.server, dataset, run, folder)
        if len_args==1:
            folder=args[0]
            if folder==self.DQMpwd:
                full_url=self.DQMpwd
            elif folder=="..":
                full_url=self.DQMpwd[:self.DQMpwd.rfind("/")]
            elif folder=="-":
                full_url=self.oldDQMpwd
            elif folder=="":
                full_url=DQMcommunicator.base_dir
            else:
                full_url=self.DQMpwd+"/"+folder

        full_url=full_url.replace(' ','%20')
        #print "cd: "+full_url

        self.oldDQMpwd=self.DQMpwd
        self.DQMpwd=full_url
        #print "In %s" %self.DQMpwd

    #-----------------------------------------------------------------------------
    def get_samples(self, samples_string="*"):
        """
        A sample contains, among other things, a data type, a dataset name
        and a run.
        """
        full_url='%s/data/json/samples?match=%s' % (self.server, samples_string)
        samples_dict=eval(self.get_data(full_url))
        return samples_dict["samples"]

    #-----------------------------------------------------------------------------

    def get_datasets_list(self, dataset_string=""):
        samples_list=self.get_samples(dataset_string)
        datasets_list=[]
        for sample in samples_list:
            temp_datasets_list = map(lambda item: item["dataset"], sample['items'])
            for temp_dataset in temp_datasets_list:
                if temp_dataset not in datasets_list:
                    datasets_list.append(temp_dataset)
        return datasets_list

    #-----------------------------------------------------------------------------
    def get_RelVal_CMSSW_versions(self,query):
        """Get the available CMSSW versions for the relvals.
        """
        relvals_list=self.get_datasets_list(query)
        # The samples are of the form /RelValTHISISMYFAVOURITECHANNEL/CMSSW_VERSION/GEN-SIM-WHATEVER-RECO
        cmssw_versions_with_duplicates=map(lambda x: x.split("/")[2],relvals_list)
        return list(set(cmssw_versions_with_duplicates))

    #-----------------------------------------------------------------------------

    def get_runs_list(self, dataset_string):
        slash="/"
        while dataset_string.endswith(slash) or dataset_string.startswith(slash):
            dataset_string=dataset_string.strip("/")
        samples_list=self.get_samples(dataset_string)
        runlist=[]
        # Get all the runs in all the items which are in every sample
        map(lambda sample: map(lambda item: runlist.append(item['run']), sample['items']), samples_list)
        return runlist

    #-----------------------------------------------------------------------------
    def get_dataset_runs(self,dataset_string):
        dataset_runs={}
        for dataset in self.get_datasets_list(dataset_string):
            dataset_runs[dataset]=self.get_runs_list(dataset)
        return dataset_runs

    #-----------------------------------------------------------------------------

    def get_common_runs(self,dataset_string1,dataset_string2):
        set1=set(self.get_runs_list(dataset_string1))
        set2=set(self.get_runs_list(dataset_string2))
        set1.intersection_update(set2)
        return list(set1)

    #-----------------------------------------------------------------------------
    def get_root_objects_list(self, url=""):
        if len(url)==0:
            url=self.DQMpwd
        else:
            url="/"+url
        url = url.replace(" ","%20")
        objects=[]
        for name,description in self.ls(url,True).items():
            if "dir" not in description["type"] and "ROOT" in description["kind"]:
                objects.append(literal2root(description["obj_as_string"],description["type"]))
        return objects

    #-----------------------------------------------------------------------------

    def get_root_objects(self, url=""):
        if len(url)==0:
            url=self.DQMpwd
        else:
            url=self.server+"/"+url
        url = url.replace(" ","%20")
        objects={}
        for name,description in self.ls(url,True).items():
            if "dir" not in description["type"] and "ROOT" in description["kind"]:
                objects[name]=literal2root(description["obj_as_string"],description["type"])
        return objects

    #-------------------------------------------------------------------------------
    def get_root_objects_list_recursive(self, url=""):
        null_url = (len(url)==0)
        if len(url)==0:
            url=self.DQMpwd
        else:
            url="/"+url
        url = url.replace(" ","%20")
        if not null_url:
            self.cd(url)
        objects=[]
        for name,description in self.ls("",True).items():
            if "dir" in description["type"]:
                objects+=self.get_root_objects_list_recursive(name)
                self.cd("..")
            elif "ROOT" in description["kind"]:
                objects.append(literal2root(description["obj_as_string"],description["type"]))
        if not null_url:
            self.cd("..")
        return objects
    #-------------------------------------------------------------------------------

    def get_root_objects_names_list_recursive(self, url="",present_url=""):
        null_url = (len(url)==0)
        if not null_url:
            if len(present_url)==0:
                present_url=url
            else:
                present_url+="_%s"%url
        if len(url)==0:
            url=self.DQMpwd
        else:
            url="/"+url
        url = url.replace(" ","%20")
        if not null_url:
            self.cd(url)
        objects_names=[]
        for name,description in self.ls("",False).items():
            if "dir" in description["type"]:
                objects_names+=self.get_root_objects_names_list_recursive(name,present_url)
                self.cd("..")
            elif "ROOT" in description["kind"]:
                objects_names.append("%s_%s"%(present_url,name))
        if not null_url:
            self.cd("..")
        return objects_names

    #-------------------------------------------------------------------------------

    def get_root_objects_recursive(self, url="",present_url=""):
        null_url = (len(url)==0)
        if not null_url:
            if len(present_url)==0:
                present_url=url
            else:
                present_url+="_%s"%url
        if len(url)==0:
            url=self.DQMpwd
        else:
            url="/"+url
        url = url.replace(" ","%20")
        #if not null_url:
        self.cd(url)
        objects={}
        for name,description in self.ls("",True).items():
            if "dir" in description["type"]:
                objects.update(self.get_root_objects_recursive(name,present_url))
                self.cd("..")
            elif "ROOT" in description["kind"]:
                objects["%s_%s"%(present_url,name)]=literal2root(description["obj_as_string"],description["type"])
        #if not null_url:
        self.cd("..")
        return objects
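def _example_dqm_communicator():
    """Illustrative sketch only, not part of the original interface: how the
    DQMcommunicator above is typically driven. The server URL and the sample
    match string are placeholders and a valid grid certificate is assumed."""
    comm = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
    # Samples (data type, dataset, run) matching a wildcard pattern:
    samples = comm.get_samples("*RelValZMM*")
    # Datasets matching the pattern and the runs they contain:
    dataset_runs = comm.get_dataset_runs("*RelValZMM*")
    # The archive can also be browsed like a filesystem:
    print(comm.ls())
    return samples, dataset_runs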
#-------------------------------------------------------------------------------

class DirID(object):
    """Structure used to identify a directory in the walked tree.
    It carries the name and depth information.
    """
    def __init__(self,name,depth,mother=""):
        self.name=name
        self.compname=recompile(name)
        self.mother=mother
        self.depth=depth

    def __eq__(self,dirid):
        depth2=dirid.depth
        compname2=dirid.compname
        name2=dirid.name
        is_equal = False
        #if self.name in name2 or name2 in self.name:
        if search(self.compname,name2)!=None or search(compname2,self.name)!=None:
            is_equal = self.depth*depth2<0 or self.depth==depth2
            if len(self.mother)*len(dirid.mother)>0:
                is_equal = is_equal and self.mother==dirid.mother
        return is_equal

    def __repr__(self):
        return "Directory %s at level %s" %(self.name,self.depth)
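def _example_dirid_matching():
    """Illustrative sketch only, not part of the original interface: DirID
    equality is a regexp match on the names plus a depth check, so membership
    tests against a list of DirIDs implement the black-listing used below.
    A negative depth acts as a wildcard on the level."""
    black_list = [DirID("JetMET", 1), DirID("Muons", -1)]
    assert DirID("JetMET", 1) in black_list       # same name and depth
    assert DirID("Muons", 3) in black_list        # depth -1 matches any level
    assert DirID("Tracking", 1) not in black_list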
#-------------------------------------------------------------------------------

class DirFetcher(Thread):
    """Fetch the content of a single "directory" in the DQM.
    """
    def __init__ (self,comm,directory):
        Thread.__init__(self)
        self.comm = comm
        self.directory = directory
        self.contents=None

    def run(self):
        self.contents = self.comm.ls(self.directory,True)

#-------------------------------------------------------------------------------
class DirWalkerDB(Thread):
    """An interface to the DQM document db. It is threaded to compensate for the
    latency introduced by the finite response time of the server.
    """
    def __init__ (self,comm1,comm2,base1,base2,directory,depth=0,do_pngs=True,stat_test="KS",test_threshold=.5,black_list=[]):
        Thread.__init__(self)
        self.comm1 = deepcopy(comm1)
        self.comm2 = deepcopy(comm2)
        self.base1,self.base2 = base1,base2
        self.directory = directory
        self.depth=depth
        self.do_pngs=do_pngs
        self.test_threshold=test_threshold
        self.stat_test=stat_test
        self.black_list=black_list
        # name of the thread
        self.name+="_%s" %directory.name

    def run(self):

        this_dir=DirID(self.directory.name,self.depth)
        if this_dir in self.black_list:
            print("Skipping %s since blacklisted!" %this_dir)
            return 0

        self.depth+=1

        the_test=Statistical_Tests[self.stat_test](self.test_threshold)
        #print "Test %s with threshold %s" %(self.stat_test,self.test_threshold)

        directory1=self.base1+"/"+self.directory.mother_dir+"/"+self.directory.name
        directory2=self.base2+"/"+self.directory.mother_dir+"/"+self.directory.name

        fetchers =(DirFetcher(self.comm1,directory1),DirFetcher(self.comm2,directory2))
        for fetcher in fetchers:
            fetcher.start()
        for fetcher in fetchers:
            fetcher.join()

        contents1 = fetchers[0].contents
        contents2 = fetchers[1].contents
        set1= set(contents1.keys())
        set2= set(contents2.keys())

        walkers=[]
        self_directory_directories=self.directory.subdirs
        self_directory_comparisons=self.directory.comparisons
        contents_names=list(set1.intersection(set2))

        for name in contents_names:
            content = contents1[name]
            if "dir" in content["type"]:
                #if this_dir not in DirWalker.white_list:continue
                subdir=Directory(name,join(self.directory.mother_dir,self.directory.name))
                dirwalker=DirWalkerDB(self.comm1,self.comm2,self.base1,self.base2,subdir,self.depth,
                                      self.do_pngs,self.stat_test,self.test_threshold,self.black_list)
                dirwalker.start()
                walkers.append(dirwalker)
                n_threads=activeCount()
                if n_threads>5:
                    #print >> stderr, "Threads that are running: %s. Joining them." %(n_threads)
                    dirwalker.join()
            elif content["kind"]=="ROOT":
                #print directory1,name
                comparison=Comparison(name,
                                      join(self.directory.mother_dir,self.directory.name),
                                      literal2root(content["obj_as_string"],content["type"]),
                                      literal2root(contents2[name]["obj_as_string"],content["type"]),
                                      deepcopy(the_test),
                                      do_pngs=self.do_pngs)
                self_directory_comparisons.append(comparison)

        for walker in walkers:
            walker.join()
            walker_directory=walker.directory
            if not walker_directory.is_empty():
                self_directory_directories.append(walker_directory)
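def _example_db_walker():
    """Illustrative sketch only, not part of the original interface: walking one
    folder of the same sample in two releases through the DQM GUI. The server
    URL, the archive paths and the folder name are placeholders."""
    comm1 = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
    comm2 = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
    base1 = "/data/json/archive/<run>/<dataset in release 1>/DQMIO"
    base2 = "/data/json/archive/<run>/<dataset in release 2>/DQMIO"
    top = Directory("JetMET")
    walker = DirWalkerDB(comm1, comm2, base1, base2, top,
                         do_pngs=False, stat_test="KS", test_threshold=.5)
    walker.start()
    walker.join()
    return walker.directory   # tree of Directory/Comparison objects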
#-------------------------------------------------------------------------------

class DQMRootFile(object):
    """Class acting as interface between the user and the harvested DQMRootFile.
    It skips the directories created by the DQM infrastructure so as to provide an
    interface as similar as possible to a real directory structure and to the
    directory structure provided by the db interface.
    """
    def __init__(self,rootfilename):
        dqmdatadir="DQMData"
        self.rootfile=TFile(rootfilename)
        self.rootfilepwd=self.rootfile.GetDirectory(dqmdatadir)
        self.rootfileprevpwd=self.rootfile.GetDirectory(dqmdatadir)
        if self.rootfilepwd == None:
            print("Directory %s does not exist: skipping. Is this a custom rootfile?" %dqmdatadir)
            self.rootfilepwd=self.rootfile
            self.rootfileprevpwd=self.rootfile

    def __is_null(self,directory,name):
        is_null = not directory
        if is_null:
            print("Directory %s does not exist!" %name, file=stderr)
        return is_null

    def ls(self,directory_name=""):
        contents={}
        directory=None
        if len(directory_name)==0:
            directory=self.rootfilepwd

        directory=self.rootfilepwd.GetDirectory(directory_name)
        if self.__is_null(directory,directory_name):
            return contents

        for key in directory.GetListOfKeys():
            contents[key.GetName()]=key.GetClassName()
        return contents

    def cd(self,directory_name):
        """Change the current TDirectoryFile. The familiar "-" and ".." directories
        can be accessed as well.
        """
        if directory_name=="-":
            tmp=self.rootfilepwd
            self.rootfilepwd=self.rootfileprevpwd
            self.rootfileprevpwd=tmp
        if directory_name=="..":
            #print "Setting prevpwd"
            self.rootfileprevpwd=self.rootfilepwd
            #print "The mom"
            mom=self.rootfilepwd.GetMotherDir()
            #print "In directory +%s+" %self.rootfilepwd
            #print "Deleting the TFileDir"
            if "Run " not in self.rootfilepwd.GetName():
                self.rootfilepwd.Delete()
            #print "Setting pwd to mom"
            self.rootfilepwd=mom
        else:
            new_directory=self.rootfilepwd.GetDirectory(directory_name)
            if not self.__is_null(new_directory,directory_name):
                self.rootfileprevpwd=self.rootfilepwd
                self.rootfilepwd=new_directory

    def getObj(self,objname):
        """Get a TObject from the rootfile.
        """
        obj=self.rootfilepwd.Get(objname)
        if not self.__is_null(obj,objname):
            return obj
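def _example_dqm_root_file():
    """Illustrative sketch only, not part of the original interface: browsing a
    harvested DQM ROOT file. The file name, run number and paths are placeholders."""
    dqm_file = DQMRootFile("DQM_V0001_R000000001__Sample__CMSSW_X_Y_Z__DQMIO.root")
    print(dqm_file.ls())                  # typically a single "Run NNNNNN" folder
    dqm_file.cd("Run 1")
    dqm_file.cd("JetMET/Run summary")     # hypothetical subsystem folder
    histo = dqm_file.getObj("SomeHistogram")
    return histo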
#-------------------------------------------------------------------------------

class DirWalkerFile(object):  # class statement reconstructed; the name is assumed
    def __init__(self, name, topdirname, rootfilename1, rootfilename2, run=-1, black_list=[], stat_test="KS", test_threshold=.5, draw_success=True, do_pngs=False, black_list_histos=[]):
        self.name=name
        self.dqmrootfile1=DQMRootFile(abspath(rootfilename1))
        self.dqmrootfile2=DQMRootFile(abspath(rootfilename2))
        self.run=run
        self.stat_test=Statistical_Tests[stat_test](test_threshold)
        self.workdir=getcwd()
        self.black_list=black_list
        self.directory=Directory(topdirname)
        #print "DIRWALKERFILE %s %s" %(draw_success,do_pngs)
        self.directory.draw_success=draw_success
        self.directory.do_pngs=do_pngs
        self.black_list_histos = black_list_histos
        self.different_histograms = {'file1': {}, 'file2': {}}  # reconstructed: this attribute is filled by ls() below
        self.filename1 = basename(rootfilename2)
        self.filename2 = basename(rootfilename1)

    def __del__(self):
        chdir(self.workdir)

    def cd(self,directory_name, on_disk=False, regexp=False):
        if regexp == True:
            if len(directory_name)!=0:
                if on_disk:
                    if not exists(directory_name):
                        makedirs(directory_name)
                    chdir(directory_name)
                tmp = self.dqmrootfile2.ls().keys()
                for elem in tmp:
                    if "Run" in elem:
                        next_dir = elem
                self.dqmrootfile2.cd(next_dir)
                tmp = self.dqmrootfile1.ls().keys()
                for elem in tmp:
                    if "Run" in elem:
                        next_dir = elem
                self.dqmrootfile1.cd(next_dir)
        else:
            if len(directory_name)!=0:
                if on_disk:
                    if not exists(directory_name):
                        makedirs(directory_name)
                    chdir(directory_name)
                self.dqmrootfile2.cd(directory_name)
                self.dqmrootfile1.cd(directory_name)

    def ls(self,directory_name=""):
        """Return the objects common to the 2 files.
        """
        contents1=self.dqmrootfile1.ls(directory_name)
        contents2=self.dqmrootfile2.ls(directory_name)
        #print "cont1: %s"%(contents1)
        #print "cont2: %s"%(contents2)
        contents={}
        self.different_histograms['file1']= {}
        self.different_histograms['file2']= {}
        keys = [key for key in contents2.keys() if key in contents1] # keys present in both files
        #print " ## keys: %s" %(keys)
        for key in keys: # iterate on the common keys
            if contents1[key]!=contents2[key]:
                diff_file1 = set(contents1.keys()) - set(contents2.keys()) # keys present only in file1
                diff_file2 = set(contents2.keys()) - set(contents1.keys()) # keys present only in file2
                for key1 in diff_file1:
                    obj_type = contents1[key1]
                    if obj_type == "TDirectoryFile":
                        self.different_histograms['file1'][key1] = contents1[key1] # a directory
                        #print "\n Missing inside a dir: ", self.ls(key1)
                        #contents[key] = contents1[key1]
                    if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : # not a histogram
                        continue
                    self.different_histograms['file1'][key1] = contents1[key1]
                for key1 in diff_file2:
                    obj_type = contents2[key1]
                    if obj_type == "TDirectoryFile":
                        self.different_histograms['file2'][key1] = contents2[key1] # a directory
                        #print "\n Missing inside a dir: ", self.ls(key1)
                        #contents[key] = contents2[key1]
                    if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : # not a histogram
                        continue
                    self.different_histograms['file2'][key1] = contents2[key1]
            contents[key]=contents1[key]
        return contents
    def getObjs(self,name):
        h1=self.dqmrootfile1.getObj(name)
        h2=self.dqmrootfile2.getObj(name)
        return h1,h2

    def __fill_single_dir(self,dir_name,directory,mother_name="",depth=0):
        #print "MOTHER NAME = +%s+" %mother_name
        #print "About to study %s (in dir %s)" %(dir_name,getcwd())

        # see if in black_list
        this_dir=DirID(dir_name,depth)
        #print " ## this_dir: %s"%(this_dir)
        if this_dir in self.black_list:
            #print "Directory %s skipped because black-listed" %dir_name
            return 0

        depth+=1

        self.cd(dir_name)
        #if dir_name == 'HLTJETMET':
        #  print self.ls()

        #print "Test %s with thre %s" %(self.stat_test.name, self.stat_test.threshold)

        contents=self.ls()
        if depth==1:
            n_top_contents=len(contents)

        #print contents
        cont_counter=1
        comparisons=[]
        for name,obj_type in contents.items():
            if obj_type=="TDirectoryFile":
                # We have a dir, launch recursion!
                # Some feedback on the progress
                if depth==1:
                    print("Studying directory %s, %s/%s" %(name,cont_counter,n_top_contents))
                    cont_counter+=1

                #print "Studying directory",name
                # ok, recursion on!
                subdir=Directory(name)
                subdir.draw_success=directory.draw_success
                subdir.do_pngs=directory.do_pngs
                self.__fill_single_dir(name,subdir,join(mother_name,dir_name),depth)
                if not subdir.is_empty():
                    if depth==1:
                        print(" ->Appending %s..." %name, end=' ')
                    directory.subdirs.append(subdir)
                    if depth==1:
                        print("Appended.")
            else:
                # We probably have a histogram. Let's make the plot and the png.
                if obj_type[:2]!="TH" and obj_type[:3]!="TPr" :
                    continue
                h1,h2=self.getObjs(name)
                #print "COMPARISON : +%s+%s+" %(mother_name,dir_name)
                path = join(mother_name,dir_name,name)
                if path in self.black_list_histos:
                    print(" Skipping %s" %(path))
                    directory.comparisons.append(Comparison(name,
                                                            join(mother_name,dir_name),
                                                            h1,h2,
                                                            deepcopy(self.stat_test),
                                                            draw_success=directory.draw_success,
                                                            do_pngs=directory.do_pngs, skip=True))
                else:
                    directory.comparisons.append(Comparison(name,
                                                            join(mother_name,dir_name),
                                                            h1,h2,
                                                            deepcopy(self.stat_test),
                                                            draw_success=directory.draw_success,
                                                            do_pngs=directory.do_pngs, skip=False))
                    directory.filename1 = self.filename1
                    directory.filename2 = self.filename2
                    directory.different_histograms['file1'] = self.different_histograms['file1']
                    directory.different_histograms['file2'] = self.different_histograms['file2']

        self.cd("..")
    def walk(self):
        # Build the top dir in the rootfile first
        rundir=""
        if self.run<0:
            # change dir in the first one...
            #print self.ls().keys()
            first_run_dir = ""
            try:
                first_run_dir = filter(lambda k: "Run " in k, self.ls().keys())[0]
            except:
                print("\nRundir not there: Is this a generic rootfile?\n")
            rundir=first_run_dir
            try:
                self.run= int(rundir.split(" ")[1])
            except:
                print("Setting run number to 0")
                self.run= 0
        else:
            rundir="Run %s"%self.run

        try:
            self.cd(rundir, False, True) # regexp=True -> check for the Run dir in case of different runs
        except:
            print("\nRundir not there: Is this a generic rootfile?\n")

        # Let's rock!
        self.__fill_single_dir(self.directory.name,self.directory)
        print("Finished")
        n_left_threads=len(tcanvas_print_processes)
        if n_left_threads>0:
            print("Waiting for %s threads to finish..." %n_left_threads)
            for p in tcanvas_print_processes:
                p.join()
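def _example_file_walker():
    """Illustrative sketch only, not part of the original interface: comparing two
    harvested ROOT files on disk. The names and file names are placeholders."""
    walker = DirWalkerFile("MyComparison", "00 Shift",
                           "DQM_file_in_release_1.root",
                           "DQM_file_in_release_2.root",
                           stat_test="KS", test_threshold=.5, do_pngs=False)
    walker.walk()
    return walker.directory   # Directory tree filled with Comparison objects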
#-------------------------------------------------------------------------------

class DirWalkerFile_thread(Thread):  # class statement reconstructed; the name is assumed
    def __init__(self, walker):
        Thread.__init__(self)
        self.walker=walker

    def run(self):
        self.walker.walk()

#-------------------------------------------------------------------------------
def string2blacklist(black_list_str):
    black_list=[]
    # replace the "__" with " ":
    black_list_str=black_list_str.replace("__"," ")
    if len(black_list_str)>0:
        for ele in black_list_str.split(","):
            dirname,level=ele.split("@")
            level=int(level)
            dirid=None
            if "/" not in dirname:
                dirid=DirID(dirname,level)
            else:
                mother,daughter=dirname.split("/")
                dirid=DirID(daughter,level,mother)
            if dirid not in black_list:
                black_list.append(dirid)

    return black_list
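def _example_blacklist_parsing():
    """Illustrative sketch only, not part of the original interface: the black-list
    string parsed by string2blacklist is a comma-separated list of '<dir>@<depth>'
    items, where '__' stands for a space and 'mother/daughter' selects a subdirectory."""
    black_list = string2blacklist("JetMET@1,Muons/Tracks@-1,Run__summary@2")
    # -> [DirID('JetMET',1), DirID('Tracks',-1,'Muons'), DirID('Run summary',2)]
    return black_list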
#-------------------------------------------------------------------------------