dqm_interfaces.py
1 from __future__ import print_function
2 from __future__ import absolute_import
3 ################################################################################
4 # RelMon: a tool for automatic Release Comparison
5 # https://twiki.cern.ch/twiki/bin/view/CMSPublic/RelMon
6 #
7 #
8 #
9 # Danilo Piparo CERN - danilo.piparo@cern.ch
10 #
11 ################################################################################
12 
13 from builtins import range
14 from copy import deepcopy
15 from os import chdir,getcwd,makedirs
16 from os.path import abspath,exists,join, basename
17 from re import sub,search
18 from re import compile as recompile
19 from sys import exit,stderr,version_info
20 from threading import Thread,activeCount
21 from time import sleep
22 from urllib2 import Request,build_opener,urlopen
23 
24 import sys
25 argv=sys.argv
26 from ROOT import *
27 import ROOT
28 sys.argv=argv
29 
30 gROOT.SetBatch(True)
31 
32 from .authentication import X509CertOpen
33 from .dirstructure import Comparison,Directory,tcanvas_print_processes
34 from .utils import Chi2,KS,BinToBin,Statistical_Tests,literal2root
35 
36 #-------------------------------------------------------------------------------
37 
39  """Base class for exceptions in this module."""
40  pass
41 
43  """Exception raised in case of problems communicating with the server.
44  """
45  def __init__(self,msg):
46  self.msg = msg
47 
49 
50  def __init__(self,msg):
51  self.msg = msg
52 
53 #-----------------------------------------------------------------------------
54 
55 class DQMcommunicator(object):
56 
57  """Communicate with the DQM Document server"""
58 
59  #-----------------------------------------------------------------------------
60 
61  base_dir='/data/json/archive/'
62 
63  def __init__(self,
64  server,
65  is_private=False,
66  ident="DQMToJson/1.0 python/%d.%d.%d" % version_info[:3]):
67  self.ident = ident
68  self.server = server
69  self.is_private = is_private
70  self.DQMpwd=DQMcommunicator.base_dir
71  self.oldDQMpwd=self.DQMpwd
72  self.opener=None
73  if not self.is_private:
74  self.opener=build_opener(X509CertOpen())
75  #-----------------------------------------------------------------------------
76 
77  def open_url(self,url):
78  url=url.replace(' ','%20')
79  datareq = Request(url)
80  datareq.add_header('User-agent', self.ident)
81  url_obj=0
82  if not self.is_private:
83  url_obj=self.opener.open(datareq)
84  #url_obj=build_opener(X509CertOpen()).open(datareq)
85  else:
86  url_obj=urlopen(datareq)
87 
88  return url_obj
89 
90  #-----------------------------------------------------------------------------
91 
92  def get_data(self, full_url):
93  #print "getting data from %s" %full_url
94  data = self.open_url(full_url).read()
95 
96  data = sub("-inf", '0', data)
97  data = sub(r"\s+inf", '0', data)
98  data = sub(r"\s+nan", '0', data)
99  data = sub('""(CMSSW.*?)""', '"\\1"', data)
100 
101  return data
102 
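# --- Annotation (not part of the original source) ----------------------------
# get_data hands the payload back as a plain string after neutralising the
# tokens "-inf", "inf" and "nan", which the eval() calls below could not
# digest, e.g. '{"mean": -inf}' becomes '{"mean": 0}', and after repairing the
# doubled quotes around CMSSW version strings.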
103  #-----------------------------------------------------------------------------
104 
105  def ls_url(self, url):
106  url=url.replace(" ","%20")
107  url=self.server+url
108  #print "listing "+url
109  form_folder={}
110  raw_folder=None
111  try:
112  raw_folder=eval(self.get_data(url))
113  except:
114  print("Retrying..")
115  for ntrials in range(5):
116  try:
117  if ntrials!=0:
118  sleep(2)
119  #raw_folder=loads(self.get_data(url))
120  raw_folder=eval(self.get_data(url))
121  break
122  except:
123  print("Could not fetch %s. Retrying" %url)
124 
125  #raw_folder=loads(self.get_data(url))
126  for content_dict in raw_folder["contents"]:
127  if "subdir" in content_dict:
128  form_folder[content_dict["subdir"]]={"type":'dir'}
129  elif "obj" in content_dict:
130  properties=content_dict["properties"]
131  obj_name=content_dict["obj"]
132  obj_type=properties["type"]
133  obj_kind=properties["kind"]
134  obj_as_string=''
135  if "rootobj" in content_dict:
136  obj_as_string=content_dict["rootobj"]
137  form_folder[obj_name]={'type':obj_type,'obj_as_string':obj_as_string,"kind":obj_kind}
138  #for k,v in form_folder.items():
139  #print "* %s --> %s" %(k,v["type"])
140 
141  return form_folder
142 
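# --- Annotation (not part of the original source) ----------------------------
# The structure answered by the GUI and digested above is expected to look
# roughly like this (names and types are illustrative):
#   {"contents": [{"subdir": "Muons"},
#                 {"obj": "SomeHistogram",
#                  "properties": {"type": "TH1F", "kind": "ROOT"},
#                  "rootobj": "<hex encoded object>"}]}
# ls_url flattens it into {name: {"type": ..., "kind": ..., "obj_as_string": ...}}.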
143  #-----------------------------------------------------------------------------
144 
145  def ls(self, url='', fetch_root=False):
146  if len(url)==0:
147  url=join(self.DQMpwd,url)
148 
149  form_folder={}
150 
151  if fetch_root:
152  url='%s?rootcontent=1'%url
153  form_folder=self.ls_url(url)
154 
155  return form_folder
156 
157  #-----------------------------------------------------------------------------
158 
159  def cd(self, *args):
160  len_args=len(args)
161  full_url=""
162  if len_args!=1 and len_args!=3:
163  raise InvalidNumberOfArguments
164  if len_args==3:
165  dataset, run, folder = args
166  full_url='%s/data/json/archive/%s/%s/%s' % (self.server, dataset, run, folder)
167  if len_args==1:
168  folder=args[0]
169  if folder==self.DQMpwd:
170  full_url=self.DQMpwd
171  elif folder=="..":
172  full_url=self.DQMpwd[:self.DQMpwd.rfind("/")]
173  elif folder=="-":
174  full_url=self.oldDQMpwd
175  elif folder=="":
176  full_url=DQMcommunicator.base_dir
177  else:
178  full_url=self.DQMpwd+"/"+folder
179 
180  full_url=full_url.replace(' ','%20')
181  #print "cd: "+full_url
182 
183  self.oldDQMpwd=self.DQMpwd
184  self.DQMpwd=full_url
185  #print "In %s" %self.DQMpwd
186 
187  #-----------------------------------------------------------------------------
188 
189  def get_samples(self, samples_string="*"):
190  """
191  A sample contains, among other things, a data type, a dataset name
192  and a run.
193  """
194  full_url='%s/data/json/samples?match=%s' % (self.server, samples_string)
195  samples_dict=eval(self.get_data(full_url))
196  return samples_dict["samples"]
197 
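# --- Annotation (not part of the original source) ----------------------------
# As consumed by get_datasets_list and get_runs_list below, each sample is
# expected to carry an "items" list whose entries provide at least a "dataset"
# and a "run" key, e.g. (hypothetical values):
#   [{"type": "offline_relval",
#     "items": [{"dataset": "/RelValZMM/CMSSW_X_Y_Z-v1/DQMIO", "run": 1}]}]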
198  #-----------------------------------------------------------------------------
199 
200  def get_datasets_list(self, dataset_string=""):
201  samples_list=self.get_samples(dataset_string)
202  datasets_list=[]
203  for sample in samples_list:
204  temp_datasets_list = [item["dataset"] for item in sample['items']]
205  for temp_dataset in temp_datasets_list:
206  if temp_dataset not in datasets_list:
207  datasets_list.append(temp_dataset)
208  return datasets_list
209 
210  #-----------------------------------------------------------------------------
211 
212  def get_RelVal_CMSSW_versions(self,query):
213  """Get the available CMSSW versions for the RelVals.
214  """
215  relvals_list=self.get_datasets_list(query)
216  # The samples are of the form /RelValTHISISMYFAVOURITECHANNEL/CMSSW_VERSION/GEN-SIM-WHATEVER-RECO
217  cmssw_versions_with_duplicates = [x.split("/")[2] for x in relvals_list]
218  return list(set(cmssw_versions_with_duplicates))
219 
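# --- Annotation (not part of the original source) ----------------------------
# On a dataset name of the documented form the release is the third "/" field,
# e.g. (hypothetical name):
#   "/RelValTTbar/CMSSW_12_0_0-PU_example-v1/GEN-SIM-RECO".split("/")[2]
#   == "CMSSW_12_0_0-PU_example-v1"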
220  #-----------------------------------------------------------------------------
221 
222  def get_runs_list(self, dataset_string):
223  slash="/"
224  while(dataset_string.endswith(slash) or dataset_string.startswith(slash)):
225  dataset_string=dataset_string.strip("/")
226  samples_list=self.get_samples(dataset_string)
227  runlist=[]
228  # Get all the runs in all the items which are in every sample
229  runlist.extend(item['run'] for sample in samples_list for item in sample['items'])
230  return runlist
231 
232  #-----------------------------------------------------------------------------
233 
234  def get_dataset_runs(self,dataset_string):
235  dataset_runs={}
236  for dataset in self.get_datasets_list(dataset_string):
237  dataset_runs[dataset]=self.get_runs_list(dataset)
238  return dataset_runs
239 
240  #-----------------------------------------------------------------------------
241 
242  def get_common_runs(self,dataset_string1,dataset_string2):
243  set1=set(self.get_runs_list(dataset_string1))
244  set2=set(self.get_runs_list(dataset_string2))
245  set1.intersection_update(set2)
246  return list(set1)
247 
248  #-----------------------------------------------------------------------------
249 
250  def get_root_objects_list(self, url=""):
251  if len(url)==0:
252  url=self.DQMpwd
253  else:
254  url="/"+url
255  url = url.replace(" ","%20")
256  objects=[]
257  for name,description in self.ls(url,True).items():
258  if "dir" not in description["type"] and "ROOT" in description["kind"]:
259  objects.append(literal2root(description["obj_as_string"],description["type"]))
260  return objects
261 
262  #-----------------------------------------------------------------------------
263 
264  def get_root_objects(self, url=""):
265  if len(url)==0:
266  url=self.DQMpwd
267  else:
268  url=self.server+"/"+url
269  url = url.replace(" ","%20")
270  objects={}
271  for name,description in self.ls(url,True).items():
272  if "dir" not in description["type"] and "ROOT" in description["kind"]:
273  objects[name]=literal2root(description["obj_as_string"],description["type"])
274  return objects
275 
276  #-------------------------------------------------------------------------------
277 
278  def get_root_objects_list_recursive(self, url=""):
279  null_url = (len(url)==0)
280  if len(url)==0:
281  url=self.DQMpwd
282  else:
283  url="/"+url
284  url = url.replace(" ","%20")
285  if not null_url:
286  self.cd(url)
287  objects=[]
288  for name,description in self.ls("",True).items():
289  if "dir" in description["type"]:
290  objects+=self.get_root_objects_list_recursive(name)
291  self.cd("..")
292  elif "ROOT" in description["kind"]:
293  objects.append(literal2root(description["obj_as_string"],description["type"]))
294  if not null_url:
295  self.cd("..")
296  return objects
297 
298  #-------------------------------------------------------------------------------
299 
300  def get_root_objects_names_list_recursive(self, url="",present_url=""):
301  null_url = (len(url)==0)
302  if (not null_url):
303  if len(present_url)==0:
304  present_url=url
305  else:
306  present_url+="_%s"%url
307  if len(url)==0:
308  url=self.DQMpwd
309  else:
310  url="/"+url
311  url = url.replace(" ","%20")
312  if not null_url:
313  self.cd(url)
314  objects_names=[]
315  for name,description in self.ls("",False).items():
316  if "dir" in description["type"]:
317  objects_names+=self.get_root_objects_names_list_recursive(name,present_url)
318  self.cd("..")
319  elif "ROOT" in description["kind"]:
320  objects_names.append("%s_%s"%(present_url,name))
321  if not null_url:
322  self.cd("..")
323  return objects_names
324 
325  #-------------------------------------------------------------------------------
326 
327  def get_root_objects_recursive(self, url="",present_url=""):
328  null_url = (len(url)==0)
329  if (not null_url):
330  if len(present_url)==0:
331  present_url=url
332  else:
333  present_url+="_%s"%url
334  if len(url)==0:
335  url=self.DQMpwd
336  else:
337  url="/"+url
338  url = url.replace(" ","%20")
339  #if not null_url:
340  self.cd(url)
341  objects={}
342  for name,description in self.ls("",True).items():
343  if "dir" in description["type"]:
344  objects.update(self.get_root_objects_recursive(name,present_url))
345  self.cd("..")
346  elif "ROOT" in description["kind"]:
347  objects["%s_%s"%(present_url,name)]=literal2root(description["obj_as_string"],description["type"])
348  #if not null_url:
349  self.cd("..")
350  return objects
351 
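# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# Minimal sketch of how a DQMcommunicator might be driven.  The server URL and
# the sample pattern are assumptions, not taken from this file.
def _dqmcommunicator_example(server="https://cmsweb.cern.ch/dqm/relval"):
    comm = DQMcommunicator(server)
    datasets = comm.get_datasets_list("*RelValZMM*")      # dataset names matching the pattern
    dataset_runs = comm.get_dataset_runs("*RelValZMM*")   # {dataset: [run, ...]}
    return datasets, dataset_runs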
352 #-------------------------------------------------------------------------------
353 
354 class DirID(object):
355  """Structure used to identify a directory in the walked tree.
356  It carries the name and depth information.
357  """
358  def __init__(self,name,depth,mother=""):
359  self.name=name
360  self.compname=recompile(name)
361  self.mother=mother
362  self.depth=depth
363  def __eq__(self,dirid):
364  depth2=dirid.depth
365  compname2=dirid.compname
366  name2=dirid.name
367  is_equal = False
368  #if self.name in name2 or name2 in self.name:
369  if search(self.compname,name2)!=None or search(compname2,self.name)!=None:
370  is_equal = self.depth*depth2 <0 or self.depth==depth2
371  if len(self.mother)*len(dirid.mother)>0:
372  is_equal = is_equal and self.mother==dirid.mother
373  return is_equal
374 
375  def __repr__(self):
376  return "Directory %s at level %s" %(self.name,self.depth)
377 
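# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# DirID equality is deliberately fuzzy: the name is used as a regular
# expression and a negative depth matches any depth, so one black-list entry
# can veto a whole family of directories.
def _dirid_example():
    black_list = [DirID("Muon", -1)]     # veto anything matching "Muon" at any depth
    walked_dir = DirID("MuonTrack", 3)
    return walked_dir in black_list      # True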
378 #-------------------------------------------------------------------------------
379 class DirFetcher(Thread):
380  """Fetch the content of a single "directory" in the DQM.
381  """
382  def __init__ (self,comm,directory):
383  Thread.__init__(self)
384  self.comm = comm
385  self.directory = directory
386  self.contents=None
387  def run(self):
388  self.contents = self.comm.ls(self.directory,True)
389 
390 #-------------------------------------------------------------------------------
391 
392 class DirWalkerDB(Thread):
393  """An interface to the DQM document db. It is threaded to compensate for the
394  latency introduced by the finite response time of the server.
395  """
396  def __init__ (self,comm1,comm2,base1,base2,directory,depth=0,do_pngs=True,stat_test="KS",test_threshold=.5,black_list=[]):
397  Thread.__init__(self)
398  self.comm1 = deepcopy(comm1)
399  self.comm2 = deepcopy(comm2)
400  self.base1,self.base2 = base1,base2
401  self.directory = directory
402  self.depth=depth
403  self.do_pngs=do_pngs
404  self.test_threshold=test_threshold
405  self.stat_test=stat_test
406  self.black_list=black_list
407  # name of the thread
408  self.name+="_%s" %directory.name
409 
410  def run(self):
411 
412  this_dir=DirID(self.directory.name,self.depth)
413  if this_dir in self.black_list:
414  print("Skipping %s since blacklisted!" %this_dir)
415  return 0
416 
417  self.depth+=1
418 
419  the_test=Statistical_Tests[self.stat_test](self.test_threshold)
420  #print "Test %s with threshold %s" %(self.stat_test,self.test_threshold)
421 
422  directory1=self.base1+"/"+self.directory.mother_dir+"/"+self.directory.name
423  directory2=self.base2+"/"+self.directory.mother_dir+"/"+self.directory.name
424 
425  fetchers =(DirFetcher(self.comm1,directory1),DirFetcher(self.comm2,directory2))
426  for fetcher in fetchers:
427  fetcher.start()
428  for fetcher in fetchers:
429  fetcher.join()
430 
431  contents1 = fetchers[0].contents
432  contents2 = fetchers[1].contents
433  set1= set(contents1.keys())
434  set2= set(contents2.keys())
435 
436  walkers=[]
437  self_directory_directories=self.directory.subdirs
438  self_directory_comparisons=self.directory.comparisons
439  contents_names=list(set1.intersection(set2))
440 
441  for name in contents_names:
442  content = contents1[name]
443  if "dir" in content["type"]:
444  #if this_dir not in DirWalker.white_list:continue
445  subdir=Directory(name,join(self.directory.mother_dir,self.directory.name))
446  dirwalker=DirWalkerDB(self.comm1,self.comm2,self.base1,self.base2,subdir,self.depth,
447  self.do_pngs,self.stat_test,self.test_threshold,self.black_list)
448  dirwalker.start()
449  walkers.append(dirwalker)
450  n_threads=activeCount()
451  if n_threads>5:
452  #print >> stderr, "Threads that are running: %s. Joining them." %(n_threads)
453  dirwalker.join()
454  elif content["kind"]=="ROOT":
455 # print directory1,name
456  comparison=Comparison(name,
457  join(self.directory.mother_dir,self.directory.name),
458  literal2root(content["obj_as_string"],content["type"]),
459  literal2root(contents2[name]["obj_as_string"],content["type"]),
460  deepcopy(the_test),
461  do_pngs=self.do_pngs)
462  self_directory_comparisons.append(comparison)
463 
464 
465  for walker in walkers:
466  walker.join()
467  walker_directory=walker.directory
468  if not walker_directory.is_empty():
469  self_directory_directories.append(walker_directory)
470 
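# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# Sketch of how a DirWalkerDB could be seeded.  The server URL and the base
# paths are placeholders: their exact layout follows the DQM GUI JSON API and
# is not defined in this file.
def _dirwalkerdb_example(server="https://cmsweb.cern.ch/dqm/relval"):
    comm1 = DQMcommunicator(server)
    comm2 = DQMcommunicator(server)
    base1 = base2 = "/data/json/archive/PLACEHOLDER_SAMPLE_PATH"
    top_directory = Directory("Muons")   # top of the tree to compare
    walker = DirWalkerDB(comm1, comm2, base1, base2, top_directory,
                         do_pngs=False, stat_test="KS", test_threshold=0.5)
    walker.start()
    walker.join()
    return walker.directory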
471 #-------------------------------------------------------------------------------
472 
474  """ Class acting as interface between the user and the harvested DQMRootFile.
475  It skips the directories created by the DQM infrastructure so as to provide an
476  interface as similar as possible to a real directory structure and to the
477  directory structure provided by the db interface.
478  """
479  def __init__(self,rootfilename):
480  dqmdatadir="DQMData"
481  self.rootfile=TFile(rootfilename)
482  self.rootfilepwd=self.rootfile.GetDirectory(dqmdatadir)
483  self.rootfileprevpwd=self.rootfile.GetDirectory(dqmdatadir)
484  if self.rootfilepwd == None:
485  print("Directory %s does not exist: skipping. Is this a custom rootfile?" %dqmdatadir)
486  self.rootfilepwd=self.rootfile
487  self.rootfileprevpwd=self.rootfile
488 
489  def __is_null(self,directory,name):
490  is_null = not directory
491  if is_null:
492  print("Directory %s does not exist!" %name, file=stderr)
493  return is_null
494 
495  def ls(self,directory_name=""):
496  contents={}
497  directory=None
498  if len(directory_name)==0:
499  directory=self.rootfilepwd
500 
501  directory=self.rootfilepwd.GetDirectory(directory_name)
502  if self.__is_null(directory,directory_name):
503  return contents
504 
505  for key in directory.GetListOfKeys():
506  contents[key.GetName()]=key.GetClassName()
507  return contents
508 
509  def cd(self,directory_name):
510  """Change the current TDirectoryFile. The familiar "-" and ".." directories
511  can be accessed as well.
512  """
513  if directory_name=="-":
514  tmp=self.rootfilepwd
515  self.rootfilepwd=self.rootfileprevpwd
516  self.rootfileprevpwd=tmp
517  if directory_name=="..":
518  #print "Setting prevpwd"
519  self.rootfileprevpwd=self.rootfilepwd
520  #print "The mom"
521  mom=self.rootfilepwd.GetMotherDir()
522  #print "In directory +%s+" %self.rootfilepwd
523  #print "Deleting the TFileDir"
524  if "Run " not in self.rootfilepwd.GetName():
525  self.rootfilepwd.Delete()
526  #print "Setting pwd to mom"
527  self.rootfilepwd=mom
528  else:
529  new_directory=self.rootfilepwd.GetDirectory(directory_name)
530  if not self.__is_null(new_directory,directory_name):
531  self.rootfileprevpwd=self.rootfilepwd
532  self.rootfilepwd=new_directory
533 
534  def getObj(self,objname):
535  """Get a TObject from the rootfile.
536  """
537  obj=self.rootfilepwd.Get(objname)
538  if not self.__is_null(obj,objname):
539  return obj
540 
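# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# Browsing a harvested DQM root file with the cd/ls idiom provided by
# DQMRootFile.  The file name is a hypothetical placeholder.
def _dqmrootfile_example(filename="DQM_V0001_R000000001__Sample__CMSSW_X_Y_Z__DQMIO.root"):
    dqmfile = DQMRootFile(filename)
    run_dirs = [name for name in dqmfile.ls() if name.startswith("Run ")]
    if run_dirs:
        dqmfile.cd(run_dirs[0])          # descend into e.g. "Run 123456"
    return dqmfile.ls()                  # {object name: ROOT class name}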
541 #-------------------------------------------------------------------------------
542 
544  def __init__(self, name, topdirname,rootfilename1, rootfilename2, run=-1, black_list=[], stat_test="KS", test_threshold=.5,draw_success=True,do_pngs=False, black_list_histos=[]):
545  self.name=name
546  self.dqmrootfile1=DQMRootFile(abspath(rootfilename1))
547  self.dqmrootfile2=DQMRootFile(abspath(rootfilename2))
548  self.run=run
549  self.stat_test=Statistical_Tests[stat_test](test_threshold)
550  self.workdir=getcwd()
551  self.black_list=black_list
552  self.directory=Directory(topdirname)
553  #print "DIRWALKERFILE %s %s" %(draw_success,do_pngs)
554  self.directory.draw_success=draw_success
555  self.directory.do_pngs=do_pngs
556  self.black_list_histos = black_list_histos
557  self.different_histograms = {}
558  self.filename1 = basename(rootfilename2)
559  self.filename2 = basename(rootfilename1)
560 
561  def __del__(self):
562  chdir(self.workdir)
563 
564  def cd(self,directory_name, on_disk=False, regexp=False,):
565  if regexp == True:
566  if len(directory_name)!=0:
567  if on_disk:
568  if not exists(directory_name):
569  makedirs(directory_name)
570  chdir(directory_name)
571  tmp = self.dqmrootfile2.ls().keys()
572  for elem in tmp:
573  if "Run" in elem:
574  next_dir = elem
575  self.dqmrootfile2.cd(next_dir)
576  tmp = self.dqmrootfile1.ls().keys()
577  for elem in tmp:
578  if "Run" in elem:
579  next_dir = elem
580  self.dqmrootfile1.cd(next_dir)
581  else:
582  if len(directory_name)!=0:
583  if on_disk:
584  if not exists(directory_name):
585  makedirs(directory_name)
586  chdir(directory_name)
587  self.dqmrootfile2.cd(directory_name)
588  self.dqmrootfile1.cd(directory_name)
589 
590  def ls(self,directory_name=""):
591  """Return the objects common to the two files.
592  """
593  contents1=self.dqmrootfile1.ls(directory_name)
594  contents2=self.dqmrootfile2.ls(directory_name)
595  #print "cont1: %s"%(contents1)
596  #print "cont2: %s"%(contents2)
597  contents={}
598  self.different_histograms['file1']= {}
599  self.different_histograms['file2']= {}
600  keys = [key for key in contents2.keys() if key in contents1] # keys present in both files
601  #print " ## keys: %s" %(keys)
602  for key in keys: #iterate on all unique keys
603  if contents1[key]!=contents2[key]:
604  diff_file1 = set(contents1.keys()) - set(contents2.keys()) #set of contents that file1 is missing
605  diff_file2 = set(contents2.keys()) - set(contents1.keys()) #--'-- that file2 is missing
606  for key1 in diff_file1:
607  obj_type = contents1[key1]
608  if obj_type == "TDirectoryFile":
609  self.different_histograms['file1'][key1] = contents1[key1] #if directory
610  #print "\n Missing inside a dir: ", self.ls(key1)
611  #contents[key] = contents1[key1]
612  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : #if histogram
613  continue
614  self.different_histograms['file1'][key1] = contents1[key1]
615  for key1 in diff_file2:
616  obj_type = contents2[key1]
617  if obj_type == "TDirectoryFile":
618  self.different_histograms['file2'][key1] = contents2[key1] #if directory
619  #print "\n Missing inside a dir: ", self.ls(key1)
620  #contents[key] = contents2[key1]
621  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" : #if histogram
622  continue
623  self.different_histograms['file2'][key1] = contents2[key1]
624  contents[key]=contents1[key]
625  return contents
626 
627  def getObjs(self,name):
628  h1=self.dqmrootfile1.getObj(name)
629  h2=self.dqmrootfile2.getObj(name)
630  return h1,h2
631 
632  def __fill_single_dir(self,dir_name,directory,mother_name="",depth=0):
633  #print "MOTHER NAME = +%s+" %mother_name
634  #print "About to study %s (in dir %s)" %(dir_name,getcwd())
635 
636  # see if in black_list
637  this_dir=DirID(dir_name,depth)
638  #print " ## this_dir: %s"%(this_dir)
639  if this_dir in self.black_list:
640  #print "Directory %s skipped because black-listed" %dir_name
641  return 0
642 
643  depth+=1
644 
645  self.cd(dir_name)
646  #if dir_name == 'HLTJETMET':
647  # print self.ls()
648 
649  #print "Test %s with thre %s" %(self.stat_test.name, self.stat_test.threshold)
650 
651  contents=self.ls()
652  if depth==1:
653  n_top_contents=len(contents)
654 
655  #print contents
656  cont_counter=1
657  comparisons=[]
658  for name,obj_type in contents.items():
659  if obj_type=="TDirectoryFile":
660  #We have a dir, launch recursion!
661  #Some feedback on the progress
662  if depth==1:
663  print("Studying directory %s, %s/%s" %(name,cont_counter,n_top_contents))
664  cont_counter+=1
665 
666  #print "Studying directory",name
667  # ok recursion on!
668  subdir=Directory(name)
669  subdir.draw_success=directory.draw_success
670  subdir.do_pngs=directory.do_pngs
671  self.__fill_single_dir(name,subdir,join(mother_name,dir_name),depth)
672  if not subdir.is_empty():
673  if depth==1:
674  print(" ->Appending %s..." %name, end=' ')
675  directory.subdirs.append(subdir)
676  if depth==1:
677  print("Appended.")
678  else:
679  # We probably have a histogram. Let's make the plot and the png.
680  if obj_type[:2]!="TH" and obj_type[:3]!="TPr" :
681  continue
682  h1,h2=self.getObjs(name)
683  #print "COMPARISON : +%s+%s+" %(mother_name,dir_name)
684  path = join(mother_name,dir_name,name)
685  if path in self.black_list_histos:
686  print(" Skipping %s" %(path))
687  directory.comparisons.append(Comparison(name,
688  join(mother_name,dir_name),
689  h1,h2,
690  deepcopy(self.stat_test),
691  draw_success=directory.draw_success,
692  do_pngs=directory.do_pngs, skip=True))
693  else:
694  directory.comparisons.append(Comparison(name,
695  join(mother_name,dir_name),
696  h1,h2,
697  deepcopy(self.stat_test),
698  draw_success=directory.draw_success,
699  do_pngs=directory.do_pngs, skip=False))
700  directory.filename1 = self.filename1
701  directory.filename2 = self.filename2
702  directory.different_histograms['file1'] = self.different_histograms['file1']
703  directory.different_histograms['file2'] = self.different_histograms['file2']
704 
705  self.cd("..")
706 
707  def walk(self):
708  # Build the top dir in the rootfile first
709  rundir=""
710  if self.run<0:
711  # change dir in the first one...
712  #print self.ls().keys()
713  first_run_dir = ""
714  try:
715  first_run_dir = [k for k in self.ls().keys() if "Run " in k][0]
716  except:
717  print("\nRundir not there: Is this a generic rootfile?\n")
718  rundir=first_run_dir
719  try:
720  self.run= int(rundir.split(" ")[1])
721  except:
722  print("Setting run number to 0")
723  self.run= 0
724  else:
725  rundir="Run %s"%self.run
726 
727  try:
728  self.cd(rundir, False, True) #True -> for checking the Rundir in case of different runs
729  except:
730  print("\nRundir not there: Is this a generic rootfile?\n")
731 
732  # Let's rock!
733  self.__fill_single_dir(self.directory.name,self.directory)
734  print("Finished")
735  n_left_threads=len(tcanvas_print_processes)
736  if n_left_threads>0:
737  print("Waiting for %s threads to finish..." %n_left_threads)
738  for p in tcanvas_print_processes:
739  p.join()
740 
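# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# Sketch of a file-to-file comparison with DirWalkerFile.  The two harvested
# root file names and the top directory label are placeholders.
def _dirwalkerfile_example():
    walker = DirWalkerFile("MyComparison", "00 Top",
                           "DQM_reference.root", "DQM_new.root",
                           stat_test="KS", test_threshold=0.5,
                           draw_success=False, do_pngs=False)
    walker.walk()                        # fills walker.directory with Comparison objects
    return walker.directory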
741 #-------------------------------------------------------------------------------
742 
744  def __init__(self, walker):
745  Thread.__init__(self)
746  self.walker=walker
747  def run(self):
748  self.walker.walk()
749 
750 #-------------------------------------------------------------------------------
751 
752 def string2blacklist(black_list_str):
753  black_list=[]
754  # replace the "__" with " ":
755  black_list_str=black_list_str.replace("__"," ")
756  if len(black_list_str)>0:
757  for ele in black_list_str.split(","):
758  dirname,level=ele.split("@")
759  level=int(level)
760  dirid=None
761  if "/" not in dirname:
762  dirid=DirID(dirname,level)
763  else:
764  mother,daughter=dirname.split("/")
765  dirid=DirID(daughter,level,mother)
766  if dirid not in black_list:
767  black_list.append(dirid)
768 
769  return black_list
770 
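# --- Example (annotation, not part of the original dqm_interfaces.py) --------
# The black-list string is a comma-separated list of "<dirname>@<depth>"
# tokens: "__" stands for a blank in a directory name, a negative depth
# matches any depth and "mother/daughter" restricts the match to a given
# mother directory.  The directory names below are illustrative.
def _string2blacklist_example():
    return string2blacklist("MessageLogger@1,Muons/RecoMuonV@3,By__Lumi__Section@-1")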
771 #-------------------------------------------------------------------------------
772 