
dqm_interfaces.py
1 ################################################################################
2 # RelMon: a tool for automatic Release Comparison
3 # https://twiki.cern.ch/twiki/bin/view/CMSPublic/RelMon
4 #
5 #
6 #
7 # Danilo Piparo CERN - danilo.piparo@cern.ch
8 #
9 ################################################################################
10 
11 from copy import deepcopy
12 from os import chdir,getcwd,makedirs
13 from os.path import abspath,exists,join, basename
14 from re import sub,search
15 from re import compile as recompile
16 from sys import exit,stderr,version_info
17 from threading import Thread,activeCount
18 from time import sleep
19 from urllib2 import Request,build_opener,urlopen
20 
21 import sys
22 argv=sys.argv # PyROOT parses (and may strip) sys.argv on import: keep a copy and restore it below
23 from ROOT import *
24 import ROOT
25 sys.argv=argv
26 
27 gROOT.SetBatch(True)
28 
29 from authentication import X509CertOpen
30 from dirstructure import Comparison,Directory,tcanvas_print_processes
31 from utils import Chi2,KS,BinToBin,Statistical_Tests,literal2root
32 
33 #-------------------------------------------------------------------------------
34 
35 class Error(Exception): # class name assumed
 36  """Base class for exceptions in this module."""
37  pass
38 
39 class ServerError(Error): # class name assumed
 40  """Exception raised in case of problems communicating with the server.
 41  """
42  def __init__(self,msg):
43  self.msg = msg
44 
45 class InvalidNumberOfArguments(Error): # name taken from the raise in DQMcommunicator.cd below; base class assumed
 46 
47  def __init__(self,msg):
48  self.msg = msg
49 
50 #-----------------------------------------------------------------------------
51 
52 class DQMcommunicator(object):
 53 
54  """Communicate with the DQM Document server"""
55 
56  #-----------------------------------------------------------------------------
57 
58  base_dir='/data/json/archive/'
59 
60  def __init__(self,
61  server,
62  is_private=False,
63  ident="DQMToJson/1.0 python/%d.%d.%d" % version_info[:3]):
64  self.ident = ident
65  self.server = server
66  self.is_private = is_private
67  self.DQMpwd=DQMcommunicator.base_dir
 68  self.oldDQMpwd=self.DQMpwd # cd() below reads self.oldDQMpwd to implement the "-" shortcut
69  self.opener=None
70  if not self.is_private:
71  self.opener=build_opener(X509CertOpen())
72  #-----------------------------------------------------------------------------
73 
74  def open_url(self,url):
75  url=url.replace(' ','%20')
76  datareq = Request(url)
77  datareq.add_header('User-agent', self.ident)
78  url_obj=0
79  if not self.is_private:
80  url_obj=self.opener.open(datareq)
81  #url_obj=build_opener(X509CertOpen()).open(datareq)
82  else:
83  url_obj=urlopen(datareq)
84 
85  return url_obj
86 
87  #-----------------------------------------------------------------------------
88 
89  def get_data(self, full_url):
90  #print "getting data from %s" %full_url
91  data = self.open_url(full_url).read()
92 
93  data = sub("-inf", '0', data)
94  data = sub("\s+inf", '0', data)
95  data = sub("\s+nan", '0', data)
96  data = sub('""(CMSSW.*?)""', '"\\1"', data)
97 
98  return data
99 
100  #-----------------------------------------------------------------------------
101 
102  def ls_url(self, url):
103  url=url.replace(" ","%20")
104  url=self.server+url
105  #print "listing "+url
106  form_folder={}
107  raw_folder=None
108  try:
109  raw_folder=eval(self.get_data(url))
110  except:
111  print "Retrying.."
112  for ntrials in xrange(5):
113  try:
114  if ntrials!=0:
115  sleep(2)
116  #raw_folder=loads(self.get_data(url))
117  raw_folder=eval(self.get_data(url))
118  break
119  except:
120  print "Could not fetch %s. Retrying" %url
121 
122  #raw_folder=loads(self.get_data(url))
123  for content_dict in raw_folder["contents"]:
124  if "subdir" in content_dict:
125  form_folder[content_dict["subdir"]]={"type":'dir'}
126  elif "obj" in content_dict:
127  properties=content_dict["properties"]
128  obj_name=content_dict["obj"]
129  obj_type=properties["type"]
130  obj_kind=properties["kind"]
131  obj_as_string=''
132  if "rootobj" in content_dict:
133  obj_as_string=content_dict["rootobj"]
134  form_folder[obj_name]={'type':obj_type,'obj_as_string':obj_as_string,"kind":obj_kind}
135  #for k,v in form_folder.items():
136  #print "* %s --> %s" %(k,v["type"])
137 
138  return form_folder
139 
140  #-----------------------------------------------------------------------------
141 
142  def ls(self, url='', fetch_root=False):
143  if len(url)==0:
144  url=join(self.DQMpwd,url)
145 
146  form_folder={}
147 
148  if fetch_root:
149  url='%s?rootcontent=1'%url
150  form_folder=self.ls_url(url)
151 
152  return form_folder
153 
154  #-----------------------------------------------------------------------------
155 
156  def cd(self, *args):
157  len_args=len(args)
158  full_url=""
159  if len_args!=1 and len_args!=3:
160  raise InvalidNumberOfArguments
161  if len_args==3:
162  dataset, run, folder = args
163  full_url='%s/data/json/archive/%s/%s/%s' % (self.server, dataset, run, folder)
164  if len_args==1:
165  folder=args[0]
166  if folder==self.DQMpwd:
167  full_url=self.DQMpwd
168  elif folder=="..":
169  full_url=self.DQMpwd[:self.DQMpwd.rfind("/")]
170  elif folder=="-":
171  full_url=self.oldDQMpwd
172  elif folder=="":
173  full_url=DQMcommunicator.base_dir
174  else:
175  full_url=self.DQMpwd+"/"+folder
176 
177  full_url=full_url.replace(' ','%20')
178  #print "cd: "+full_url
179 
180  self.oldDQMpwd=self.DQMpwd
181  self.DQMpwd=full_url
182  #print "In %s" %self.DQMpwd
183 
184  #-----------------------------------------------------------------------------
185 
186  def get_samples(self, samples_string="*"):
187  """
 188  A sample contains, among other things, a data type, a dataset name
189  and a run.
190  """
191  full_url='%s/data/json/samples?match=%s' % (self.server, samples_string)
192  samples_dict=eval(self.get_data(full_url))
193  return samples_dict["samples"]
194 
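# Usage sketch for get_samples (illustrative; the server URL and the match pattern are
# assumptions, not taken from this file). The 'items', 'dataset' and 'run' keys are the
# ones the methods below rely on:
#
#   comm = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
#   for sample in comm.get_samples("*RelValZMM*"):
#       for item in sample['items']:
#           print("%s run %s" % (item['dataset'], item['run']))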
195  #-----------------------------------------------------------------------------
196 
197  def get_datasets_list(self, dataset_string=""):
198  samples_list=self.get_samples(dataset_string)
199  datasets_list=[]
200  for sample in samples_list:
201  temp_datasets_list = map(lambda item:item["dataset"] ,sample['items'])
202  for temp_dataset in temp_datasets_list:
203  if not temp_dataset in datasets_list:
204  datasets_list.append(temp_dataset)
205  return datasets_list
206 
207  #-----------------------------------------------------------------------------
208 
209  def get_RelVal_CMSSW_versions(self,query):
 210  """Get the available CMSSW versions for the RelVals.
211  """
212  relvals_list=self.get_datasets_list(query)
213  # The samples are of the form /RelValTHISISMYFAVOURITECHANNEL/CMSSW_VERSION/GEN-SIM-WHATEVER-RECO
214  cmssw_versions_with_duplicates=map (lambda x: x.split("/")[2],relvals_list)
215  return list(set(cmssw_versions_with_duplicates))
216 
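# Worked example of the split above (the dataset name is illustrative):
#   "/RelValZMM/CMSSW_10_2_0-102X_mcRun2_v1-v1/DQMIO".split("/")[2]
#   gives "CMSSW_10_2_0-102X_mcRun2_v1-v1"; duplicates are then removed via set().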
217  #-----------------------------------------------------------------------------
218 
219  def get_runs_list(self, dataset_string):
220  slash="/"
 221  while(dataset_string.endswith(slash) or dataset_string.startswith(slash)):
222  dataset_string=dataset_string.strip("/")
223  samples_list=self.get_samples(dataset_string)
224  runlist=[]
225  # Get all the runs in all the items which are in every sample
226  map( lambda sample: map (lambda item: runlist.append(item['run']), sample['items']), samples_list)
227  return runlist
228 
229  #-----------------------------------------------------------------------------
230 
231  def get_dataset_runs(self,dataset_string):
232  dataset_runs={}
233  for dataset in self.get_datasets_list(dataset_string):
234  dataset_runs[dataset]=self.get_runs_list(dataset)
235  return dataset_runs
236 
237  #-----------------------------------------------------------------------------
238 
239  def get_common_runs(self,dataset_string1,dataset_string2):
240  set1=set(self.get_runs_list(dataset_string1))
241  set2=set(self.get_runs_list(dataset_string2))
242  set1.intersection_update(set2)
 243  return list(set1) # set1 now holds the intersection of the two run lists
244 
245  #-----------------------------------------------------------------------------
246 
247  def get_root_objects_list(self, url=""):
248  if len(url)==0:
249  url=self.DQMpwd
250  else:
251  url="/"+url
252  url = url.replace(" ","%20")
253  objects=[]
254  for name,description in self.ls(url,True).items():
255  if "dir" not in description["type"] and "ROOT" in description["kind"]:
256  objects.append(literal2root(description["obj_as_string"],description["type"]))
257  return objects
258 
259  #-----------------------------------------------------------------------------
260 
261  def get_root_objects(self, url=""):
262  if len(url)==0:
263  url=self.DQMpwd
264  else:
 265  url="/"+url # ls_url(), reached via ls(), already prepends the server
266  url = url.replace(" ","%20")
267  objects={}
268  for name,description in self.ls(url,True).items():
269  if "dir" not in description["type"] and "ROOT" in description["kind"]:
270  objects[name]=literal2root(description["obj_as_string"],description["type"])
271  return objects
272 
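# Usage sketch for the getters above (illustrative; dataset, run and folder are assumptions).
# Each returned object is rebuilt in memory from its "rootobj" string via literal2root:
#
#   comm.cd("RelValZMM/CMSSW_X_Y_Z/DQMIO", "1", "DQMData")   # dataset, run, folder
#   histos = comm.get_root_objects()                         # {name: TObject, ...} of the current directory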
273  #-------------------------------------------------------------------------------
274 
275  def get_root_objects_list_recursive(self, url=""):
 276  null_url = (len(url)==0)
277  if len(url)==0:
278  url=self.DQMpwd
279  else:
280  url="/"+url
281  url = url.replace(" ","%20")
282  if not null_url:
283  self.cd(url)
284  objects=[]
285  for name,description in self.ls("",True).items():
286  if "dir" in description["type"]:
287  objects+=self.get_root_objects_list_recursive(name)
288  self.cd("..")
289  elif "ROOT" in description["kind"]:
290  objects.append(literal2root(description["obj_as_string"],description["type"]))
291  if not null_url:
292  self.cd("..")
293  return objects
294 
295  #-------------------------------------------------------------------------------
296 
297  def get_root_objects_names_list_recursive(self, url="",present_url=""):
298  null_url = (len(url)==0)
299  if (not null_url):
300  if len(present_url)==0:
301  present_url=url
302  else:
303  present_url+="_%s"%url
304  if len(url)==0:
305  url=self.DQMpwd
306  else:
307  url="/"+url
308  url = url.replace(" ","%20")
309  if not null_url:
310  self.cd(url)
311  objects_names=[]
312  for name,description in self.ls("",False).items():
313  if "dir" in description["type"]:
314  objects_names+=self.get_root_objects_names_list_recursive(name,present_url)
315  self.cd("..")
316  elif "ROOT" in description["kind"]:
317  objects_names.append("%s_%s"%(present_url,name))
318  if not null_url:
319  self.cd("..")
320  return objects_names
321 
322  #-------------------------------------------------------------------------------
323 
324  def get_root_objects_recursive(self, url="",present_url=""):
325  null_url = (len(url)==0)
326  if (not null_url):
327  if len(present_url)==0:
328  present_url=url
329  else:
330  present_url+="_%s"%url
331  if len(url)==0:
332  url=self.DQMpwd
333  else:
334  url="/"+url
335  url = url.replace(" ","%20")
336  #if not null_url:
337  self.cd(url)
338  objects={}
339  for name,description in self.ls("",True).items():
340  if "dir" in description["type"]:
341  objects.update(self.get_root_objects_recursive(name,present_url))
342  self.cd("..")
343  elif "ROOT" in description["kind"]:
344  objects["%s_%s"%(present_url,name)]=literal2root(description["obj_as_string"],description["type"])
345  #if not null_url:
346  self.cd("..")
347  return objects
348 
349 #-------------------------------------------------------------------------------
350 
351 class DirID(object):
 352  """Structure used to identify a directory in the walked tree.
353  It carries the name and depth information.
354  """
355  def __init__(self,name,depth,mother=""):
356  self.name=name
357  self.compname=recompile(name)
358  self.mother=mother
359  self.depth=depth
360  def __eq__(self,dirid):
361  depth2=dirid.depth
362  compname2=dirid.compname
363  name2=dirid.name
364  is_equal = False
365  #if self.name in name2 or name2 in self.name:
366  if search(self.compname,name2)!=None or search(compname2,self.name)!=None:
367  is_equal = self.depth*depth2 <0 or self.depth==depth2
368  if len(self.mother)*len(dirid.mother)>0:
369  is_equal = is_equal and self.mother==dirid.mother
370  return is_equal
371 
372  def __repr__(self):
373  return "Directory %s at level %s" %(self.name,self.depth)
374 
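# Matching sketch for DirID.__eq__ (names and depths are illustrative): the name check is
# regex based, a negative depth on either side acts as a depth wildcard, and mothers are
# only compared when both are set.
#
#   DirID("Muons", 2)  == DirID("Muons", 2)       # True: names match, depths equal
#   DirID("Muons", -1) == DirID("MuonsVsPt", 3)   # True: regex match, depth product < 0
#   DirID("Muons", 2)  == DirID("Muons", 3)       # False: both depths positive and different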
375 #-------------------------------------------------------------------------------
376 class DirFetcher(Thread):
 377  """ Fetch the content of a single "directory" in the DQM.
378  """
379  def __init__ (self,comm,directory):
380  Thread.__init__(self)
381  self.comm = comm
382  self.directory = directory
383  self.contents=None
384  def run(self):
385  self.contents = self.comm.ls(self.directory,True)
386 
387 #-------------------------------------------------------------------------------
388 
389 class DirWalkerDB(Thread):
 390  """An interface to the DQM document db. It is threaded to compensate for the
391  latency introduced by the finite response time of the server.
392  """
393  def __init__ (self,comm1,comm2,base1,base2,directory,depth=0,do_pngs=True,stat_test="KS",test_threshold=.5,black_list=[]):
394  Thread.__init__(self)
395  self.comm1 = deepcopy(comm1)
396  self.comm2 = deepcopy(comm2)
397  self.base1,self.base2 = base1,base2
398  self.directory = directory
399  self.depth=depth
400  self.do_pngs=do_pngs
401  self.test_threshold=test_threshold
402  self.stat_test=stat_test
403  self.black_list=black_list
404  # name of the thread
405  self.name+="_%s" %directory.name
406 
407  def run(self):
408 
409  this_dir=DirID(self.directory.name,self.depth)
410  if this_dir in self.black_list:
411  print "Skipping %s since blacklisted!" %this_dir
412  return 0
413 
414  self.depth+=1
415 
416  the_test=Statistical_Tests[self.stat_test](self.test_threshold)
417  #print "Test %s with threshold %s" %(self.stat_test,self.test_threshold)
418 
419  directory1=self.base1+"/"+self.directory.mother_dir+"/"+self.directory.name
420  directory2=self.base2+"/"+self.directory.mother_dir+"/"+self.directory.name
421 
422  fetchers =(DirFetcher(self.comm1,directory1),DirFetcher(self.comm2,directory2))
423  for fetcher in fetchers:
424  fetcher.start()
425  for fetcher in fetchers:
426  fetcher.join()
427 
428  contents1 = fetchers[0].contents
429  contents2 = fetchers[1].contents
430  set1= set(contents1.keys())
431  set2= set(contents2.keys())
432 
433  walkers=[]
434  self_directory_directories=self.directory.subdirs
435  self_directory_comparisons=self.directory.comparisons
436  contents_names=list(set1.intersection(set2))
437 
438  for name in contents_names:
439  content = contents1[name]
440  if "dir" in content["type"]:
441  #if this_dir not in DirWalker.white_list:continue
442  subdir=Directory(name,join(self.directory.mother_dir,self.directory.name))
443  dirwalker=DirWalkerDB(self.comm1,self.comm2,self.base1,self.base2,subdir,self.depth,
444  self.do_pngs,self.stat_test,self.test_threshold,self.black_list)
445  dirwalker.start()
446  walkers.append(dirwalker)
447  n_threads=activeCount()
448  if n_threads>5:
449  #print >> stderr, "Threads that are running: %s. Joining them." %(n_threads)
450  dirwalker.join()
451  elif content["kind"]=="ROOT":
452 # print directory1,name
453  comparison=Comparison(name,
454  join(self.directory.mother_dir,self.directory.name),
455  literal2root(content["obj_as_string"],content["type"]),
456  literal2root(contents2[name]["obj_as_string"],content["type"]),
457  deepcopy(the_test),
458  do_pngs=self.do_pngs)
459  self_directory_comparisons.append(comparison)
460 
461 
462  for walker in walkers:
463  walker.join()
464  walker_directory=walker.directory
465  if not walker_directory.is_empty():
466  self_directory_directories.append(walker_directory)
467 
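# Usage sketch for DirWalkerDB (illustrative; the base paths and the directory name are
# placeholders, not values taken from this file):
#
#   comm1 = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
#   comm2 = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
#   base1 = "/data/json/archive/<run>/<dataset_release1>/DQMIO"   # hypothetical
#   base2 = "/data/json/archive/<run>/<dataset_release2>/DQMIO"   # hypothetical
#   top_dir = Directory("Report")
#   walker = DirWalkerDB(comm1, comm2, base1, base2, top_dir, stat_test="KS", test_threshold=.5)
#   walker.start(); walker.join()   # top_dir is then filled with subdirs and Comparisons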
468 #-------------------------------------------------------------------------------
469 
470 class DQMRootFile(object):
 471  """ Class acting as interface between the user and the harvested DQMRootFile.
 472  It skips the directories created by the DQM infrastructure so as to provide an
 473  interface as similar as possible to a real directory structure and to the
 474  directory structure provided by the db interface.
475  """
476  def __init__(self,rootfilename):
477  dqmdatadir="DQMData"
478  self.rootfile=TFile(rootfilename)
479  self.rootfilepwd=self.rootfile.GetDirectory(dqmdatadir)
480  self.rootfileprevpwd=self.rootfile.GetDirectory(dqmdatadir)
481  if self.rootfilepwd == None:
482  print "Directory %s does not exist: skipping. Is this a custom rootfile?" %dqmdatadir
483  self.rootfilepwd=self.rootfile
484  self.rootfileprevpwd=self.rootfile
485 
486  def __is_null(self,directory,name):
487  is_null = not directory
488  if is_null:
489  print >> stderr, "Directory %s does not exist!" %name
490  return is_null
491 
492  def ls(self,directory_name=""):
493  contents={}
494  directory=None
 495  if len(directory_name)==0:
 496  directory=self.rootfilepwd
 497  else:
 498  directory=self.rootfilepwd.GetDirectory(directory_name)
499  if self.__is_null(directory,directory_name):
500  return contents
501 
502  for key in directory.GetListOfKeys():
503  contents[key.GetName()]=key.GetClassName()
504  return contents
505 
506  def cd(self,directory_name):
507  """Change the current TDirectoryFile. The familiar "-" and ".." directories
508  can be accessed as well.
509  """
510  if directory_name=="-":
511  tmp=self.rootfilepwd
512  self.rootfilepwd=self.rootfileprevpwd
513  self.rootfileprevpwd=tmp
514  if directory_name=="..":
515  #print "Setting prevpwd"
516  self.rootfileprevpwd=self.rootfilepwd
517  #print "The mom"
518  mom=self.rootfilepwd.GetMotherDir()
519  #print "In directory +%s+" %self.rootfilepwd
520  #print "Deleting the TFileDir"
521  if "Run " not in self.rootfilepwd.GetName():
522  self.rootfilepwd.Delete()
523  #print "Setting pwd to mom"
524  self.rootfilepwd=mom
525  else:
526  new_directory=self.rootfilepwd.GetDirectory(directory_name)
527  if not self.__is_null(new_directory,directory_name):
528  self.rootfileprevpwd=self.rootfilepwd
529  self.rootfilepwd=new_directory
530 
531  def getObj(self,objname):
532  """Get a TObject from the rootfile.
533  """
534  obj=self.rootfilepwd.Get(objname)
535  if not self.__is_null(obj,objname):
536  return obj
537 
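# Usage sketch for DQMRootFile (illustrative; the file name and the paths are assumptions):
#
#   dqm_file = DQMRootFile("DQM_V0001_R000000001__RelValZMM__CMSSW_X_Y_Z__DQMIO.root")
#   dqm_file.cd("Run 1")                  # run folders sit below the DQMData directory
#   dqm_file.cd("Tracking/Run summary")
#   print(dqm_file.ls())                  # {object_or_subdir_name: ROOT class name, ...}
#   histo = dqm_file.getObj("SomeHistogramName")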
538 #-------------------------------------------------------------------------------
539 
540 class DirWalkerFile(object):
 541  def __init__(self, name, topdirname,rootfilename1, rootfilename2, run=-1, black_list=[], stat_test="KS", test_threshold=.5,draw_success=True,do_pngs=False, black_list_histos=[]):
542  self.name=name
543  self.dqmrootfile1=DQMRootFile(abspath(rootfilename1))
544  self.dqmrootfile2=DQMRootFile(abspath(rootfilename2))
545  self.run=run
546  self.stat_test=Statistical_Tests[stat_test](test_threshold)
547  self.workdir=getcwd()
548  self.black_list=black_list
549  self.directory=Directory(topdirname)
550  #print "DIRWALKERFILE %s %s" %(draw_success,do_pngs)
551  self.directory.draw_success=draw_success
552  self.directory.do_pngs=do_pngs
553  self.black_list_histos = black_list_histos
 554  self.different_histograms={'file1':{},'file2':{}} # content assumed: ls() and __fill_single_dir below rely on this attribute
 555  self.filename1 = basename(rootfilename2)
556  self.filename2 = basename(rootfilename1)
557 
558  def __del__(self):
559  chdir(self.workdir)
560 
561  def cd(self,directory_name, on_disk=False, regexp=False,):
562  if regexp == True:
563  if len(directory_name)!=0:
564  if on_disk:
565  if not exists(directory_name):
566  makedirs(directory_name)
567  chdir(directory_name)
568  tmp = self.dqmrootfile2.ls().keys()
569  for elem in tmp:
570  if "Run" in elem:
571  next_dir = elem
572  self.dqmrootfile2.cd(next_dir)
573  tmp = self.dqmrootfile1.ls().keys()
574  for elem in tmp:
575  if "Run" in elem:
576  next_dir = elem
577  self.dqmrootfile1.cd(next_dir)
578  else:
579  if len(directory_name)!=0:
580  if on_disk:
581  if not exists(directory_name):
582  makedirs(directory_name)
583  chdir(directory_name)
584  self.dqmrootfile2.cd(directory_name)
585  self.dqmrootfile1.cd(directory_name)
586 
587  def ls(self,directory_name=""):
 588  """Return the objects common to the 2 files.
589  """
590  contents1=self.dqmrootfile1.ls(directory_name)
591  contents2=self.dqmrootfile2.ls(directory_name)
592  #print "cont1: %s"%(contents1)
593  #print "cont2: %s"%(contents2)
594  contents={}
595  self.different_histograms['file1']= {}
596  self.different_histograms['file2']= {}
 597  keys = [key for key in contents2.keys() if key in contents1] #the keys present in both files
598  #print " ## keys: %s" %(keys)
 599  for key in keys: #iterate on the keys common to both files
600  if contents1[key]!=contents2[key]:
601  diff_file1 = set(contents1.keys()) - set(contents2.keys()) #set of contents that file1 is missing
602  diff_file2 = set(contents2.keys()) - set(contents1.keys()) #--'-- that file2 is missing
603  for key1 in diff_file1:
604  obj_type = contents1[key1]
605  if obj_type == "TDirectoryFile":
 606  self.different_histograms['file1'][key1] = contents1[key1] #if directory
607  #print "\n Missing inside a dir: ", self.ls(key1)
608  #contents[key] = contents1[key1]
 609  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : #skip if not a histogram or profile
610  continue
611  self.different_histograms['file1'][key1] = contents1[key1]
612  for key1 in diff_file2:
613  obj_type = contents2[key1]
614  if obj_type == "TDirectoryFile":
 615  self.different_histograms['file2'][key1] = contents2[key1] #if directory
616  #print "\n Missing inside a dir: ", self.ls(key1)
617  #contents[key] = contents2[key1]
 618  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : #skip if not a histogram or profile
619  continue
620  self.different_histograms['file2'][key1] = contents2[key1]
621  contents[key]=contents1[key]
622  return contents
623 
624  def getObjs(self,name):
625  h1=self.dqmrootfile1.getObj(name)
626  h2=self.dqmrootfile2.getObj(name)
627  return h1,h2
628 
629  def __fill_single_dir(self,dir_name,directory,mother_name="",depth=0):
630  #print "MOTHER NAME = +%s+" %mother_name
631  #print "About to study %s (in dir %s)" %(dir_name,getcwd())
632 
633  # see if in black_list
634  this_dir=DirID(dir_name,depth)
635  #print " ## this_dir: %s"%(this_dir)
636  if this_dir in self.black_list:
637  #print "Directory %s skipped because black-listed" %dir_name
638  return 0
639 
640  depth+=1
641 
642  self.cd(dir_name)
643  #if dir_name == 'HLTJETMET':
644  # print self.ls()
645 
646  #print "Test %s with thre %s" %(self.stat_test.name, self.stat_test.threshold)
647 
648  contents=self.ls()
649  if depth==1:
650  n_top_contents=len(contents)
651 
652  #print contents
653  cont_counter=1
654  comparisons=[]
655  for name,obj_type in contents.items():
656  if obj_type=="TDirectoryFile":
657  #We have a dir, launch recursion!
658  #Some feedback on the progress
659  if depth==1:
660  print "Studying directory %s, %s/%s" %(name,cont_counter,n_top_contents)
661  cont_counter+=1
662 
663  #print "Studying directory",name
664  # ok recursion on!
665  subdir=Directory(name)
666  subdir.draw_success=directory.draw_success
667  subdir.do_pngs=directory.do_pngs
668  self.__fill_single_dir(name,subdir,join(mother_name,dir_name),depth)
669  if not subdir.is_empty():
670  if depth==1:
671  print " ->Appending %s..." %name,
672  directory.subdirs.append(subdir)
673  if depth==1:
674  print "Appended."
675  else:
 676  # We probably have a histogram: make the plot and the png.
677  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" :
678  continue
679  h1,h2=self.getObjs(name)
680  #print "COMPARISON : +%s+%s+" %(mother_name,dir_name)
681  path = join(mother_name,dir_name,name)
682  if path in self.black_list_histos:
683  print " Skipping %s" %(path)
684  directory.comparisons.append(Comparison(name,
685  join(mother_name,dir_name),
686  h1,h2,
687  deepcopy(self.stat_test),
688  draw_success=directory.draw_success,
689  do_pngs=directory.do_pngs, skip=True))
690  else:
691  directory.comparisons.append(Comparison(name,
692  join(mother_name,dir_name),
693  h1,h2,
694  deepcopy(self.stat_test),
695  draw_success=directory.draw_success,
696  do_pngs=directory.do_pngs, skip=False))
697  directory.filename1 = self.filename1
698  directory.filename2 = self.filename2
699  directory.different_histograms['file1'] = self.different_histograms['file1']
700  directory.different_histograms['file2'] = self.different_histograms['file2']
701 
702  self.cd("..")
703 
704  def walk(self):
705  # Build the top dir in the rootfile first
706  rundir=""
707  if self.run<0:
708  # change dir in the first one...
709  #print self.ls().keys()
710  first_run_dir = ""
711  try:
712  first_run_dir = filter(lambda k: "Run " in k, self.ls().keys())[0]
713  except:
714  print "\nRundir not there: Is this a generic rootfile?\n"
715  rundir=first_run_dir
716  try:
717  self.run= int(rundir.split(" ")[1])
718  except:
719  print "Setting run number to 0"
720  self.run= 0
721  else:
722  rundir="Run %s"%self.run
723 
724  try:
725  self.cd(rundir, False, True) #True -> for checking the Rundir in case of different runs
726  except:
727  print "\nRundir not there: Is this a generic rootfile?\n"
728 
729  # Let's rock!
730  self.__fill_single_dir(self.directory.name,self.directory)
731  print "Finished"
732  n_left_threads=len(tcanvas_print_processes)
733  if n_left_threads>0:
734  print "Waiting for %s threads to finish..." %n_left_threads
735  for p in tcanvas_print_processes:
736  p.join()
737 
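# Usage sketch for DirWalkerFile (illustrative; the names and file names are assumptions):
#
#   walker = DirWalkerFile("CMSSW_X_Y_Z_vs_CMSSW_X_Y_W", "Report",
#                          "DQM_release1.root", "DQM_release2.root",
#                          stat_test="Chi2", test_threshold=.1, do_pngs=True)
#   walker.walk()
#   report_tree = walker.directory   # a dirstructure.Directory tree filled with Comparison objects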
738 #-------------------------------------------------------------------------------
739 
740 class DirWalkerFileThread(Thread): # class name assumed; thread wrapper that runs a DirWalkerFile
 741  def __init__(self, walker):
742  Thread.__init__(self)
743  self.walker=walker
744  def run(self):
745  self.walker.walk()
746 
747 #-------------------------------------------------------------------------------
748 
749 def string2blacklist(black_list_str):
750  black_list=[]
 751  # replace the "__" with " ":
752  black_list_str=black_list_str.replace("__"," ")
753  if len(black_list_str)>0:
754  for ele in black_list_str.split(","):
755  dirname,level=ele.split("@")
756  level=int(level)
757  dirid=None
758  if "/" not in dirname:
759  dirid=DirID(dirname,level)
760  else:
761  mother,daughter=dirname.split("/")
762  dirid=DirID(daughter,level,mother)
763  if not dirid in black_list:
764  black_list.append(dirid)
765 
766  return black_list
767 
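# Example of the black-list string format parsed above (directory names are illustrative):
# "__" stands for a blank, entries are comma separated, each entry is <dirname>@<depth>,
# optionally with a mother directory given as <mother>/<daughter>@<depth>.
#
#   string2blacklist("HLT@1,AlCaReco@1,JetMET/Jet@2")
#   returns [DirID("HLT",1), DirID("AlCaReco",1), DirID("Jet",2,"JetMET")]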
768 #-------------------------------------------------------------------------------
769 