from copy import deepcopy
from os import chdir, getcwd, makedirs
from os.path import abspath, exists, join, basename
from re import sub, search
from re import compile as recompile
from sys import exit, stderr, version_info
from threading import Thread, activeCount
from time import sleep
from urllib2 import Request, build_opener, urlopen

from authentication import X509CertOpen
from dirstructure import Comparison, Directory, tcanvas_print_processes
from utils import Chi2, KS, BinToBin, Statistical_Tests, literal2root
"""Base class for exceptions in this module."""

"""Exception occurs in case of problems of communication with the server."""

"""Communicate with the DQM Document server"""
base_dir = '/data/json/archive/'

def __init__(self, server, is_private=False, ident="DQMToJson/1.0 python/%d.%d.%d" % version_info[:3]):
url = url.replace(' ', '%20')
datareq = Request(url)
datareq.add_header('User-agent', self.ident)
url_obj = self.opener.open(datareq)
url_obj = urlopen(datareq)
data = self.open_url(full_url).read()
data = sub("-inf", '0', data)
data = sub("\s+inf", '0', data)
data = sub("\s+nan", '0', data)
data = sub('""(CMSSW.*?)""', '"\\1"', data)
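# Illustrative sketch (not part of the original file): the substitutions above turn the
# JSON-like server payload into something eval() can digest, replacing the non-literal
# tokens -inf/inf/nan with 0 and fixing doubled quotes around the release name.
# The sample payload below is hypothetical.
raw = '{"mean": -inf, "rms": nan, "release": ""CMSSW_10_6_0""}'
cleaned = sub("-inf", '0', raw)
cleaned = sub("\s+inf", '0', cleaned)
cleaned = sub("\s+nan", '0', cleaned)
cleaned = sub('""(CMSSW.*?)""', '"\\1"', cleaned)
# cleaned == '{"mean": 0, "rms":0, "release": "CMSSW_10_6_0"}'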
url = url.replace(" ", "%20")
for ntrials in xrange(5):
print "Could not fetch %s. Retrying" % url

for content_dict in raw_folder["contents"]:
    if "subdir" in content_dict:
        form_folder[content_dict["subdir"]] = {"type": 'dir'}
    elif "obj" in content_dict:
        properties = content_dict["properties"]
        obj_name = content_dict["obj"]
        obj_type = properties["type"]
        obj_kind = properties["kind"]
        if "rootobj" in content_dict:
            obj_as_string = content_dict["rootobj"]
        form_folder[obj_name] = {'type': obj_type, 'obj_as_string': obj_as_string, "kind": obj_kind}
def ls(self, url='', fetch_root=False):
url = '%s?rootcontent=1' % url
form_folder = self.ls_url(url)

if len_args != 1 and len_args != 3:
    raise InvalidNumberOfArguments
dataset, run, folder = args
full_url = '%s/data/json/archive/%s/%s/%s' % (self.server, dataset, run, folder)
full_url = self.DQMpwd[:self.DQMpwd.rfind("/")]
full_url = DQMcommunicator.base_dir
full_url = self.DQMpwd + "/" + folder
full_url = full_url.replace(' ', '%20')
A sample contains, among other things, a data type, a dataset name
full_url = '%s/data/json/samples?match=%s' % (self.server, samples_string)
samples_dict = eval(self.get_data(full_url))
return samples_dict["samples"]
for sample in samples_list:
    temp_datasets_list = map(lambda item: item["dataset"], sample['items'])
    for temp_dataset in temp_datasets_list:
        if not temp_dataset in datasets_list:
            datasets_list.append(temp_dataset)
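# Illustrative usage sketch (not part of the original file): the server URL and the
# match string are hypothetical.
comm = DQMcommunicator(server="https://cmsweb.cern.ch/dqm/relval")
samples = comm.get_samples("*CMSSW_10_6_0*")         # list of sample dictionaries
datasets = comm.get_datasets_list("*CMSSW_10_6_0*")  # unique dataset names found in them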
"""Get the available CMSSW versions for the relvals."""
cmssw_versions_with_duplicates = map(lambda x: x.split("/")[2], relvals_list)
return list(set(cmssw_versions_with_duplicates))
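# Illustrative sketch (not part of the original file): for a relval dataset name of the
# usual /PrimaryDataset/Release-Conditions-vN/Tier form (the example below is
# hypothetical), split("/")[2] picks out the release field.
name = "/RelValZMM/CMSSW_10_6_0-106X_upgrade2018_realistic_v4-v1/DQMIO"
print name.split("/")[2]  # -> CMSSW_10_6_0-106X_upgrade2018_realistic_v4-v1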
while dataset_string.endswith(slash) or dataset_string.startswith(slash):
    dataset_string = dataset_string.strip("/")

map(lambda sample: map(lambda item: runlist.append(item['run']), sample['items']), samples_list)

set1.intersection_update(set2)
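# Illustrative sketch (not part of the original file): get_common_runs boils down to
# intersecting the run sets of the two dataset selections. The dataset strings are
# hypothetical and comm is a DQMcommunicator instance as above.
runs1 = set(comm.get_runs_list("/RelValZMM/CMSSW_10_6_0-*/DQMIO"))
runs2 = set(comm.get_runs_list("/RelValZMM/CMSSW_10_6_1-*/DQMIO"))
runs1.intersection_update(runs2)  # runs present in both selections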
url = url.replace(" ", "%20")
for name, description in self.ls(url, True).items():
    if "dir" not in description["type"] and "ROOT" in description["kind"]:
        objects.append(literal2root(description["obj_as_string"], description["type"]))
url = url.replace(" ", "%20")
for name, description in self.ls(url, True).items():
    if "dir" not in description["type"] and "ROOT" in description["kind"]:
        objects[name] = literal2root(description["obj_as_string"], description["type"])
null_url = (len(url) == 0)
url = url.replace(" ", "%20")
for name, description in self.ls("", True).items():
    if "dir" in description["type"]:
    elif "ROOT" in description["kind"]:
        objects.append(literal2root(description["obj_as_string"], description["type"]))
null_url = (len(url) == 0)
if len(present_url) == 0:
present_url += "_%s" % url
url = url.replace(" ", "%20")
for name, description in self.ls("", False).items():
    if "dir" in description["type"]:
    elif "ROOT" in description["kind"]:
        objects_names.append("%s_%s" % (present_url, name))
null_url = (len(url) == 0)
if len(present_url) == 0:
present_url += "_%s" % url
url = url.replace(" ", "%20")
for name, description in self.ls("", True).items():
    if "dir" in description["type"]:
    elif "ROOT" in description["kind"]:
        objects["%s_%s" % (present_url, name)] = literal2root(description["obj_as_string"], description["type"])
"""Structure used to identify a directory in the walked tree.
It carries the name and depth information."""
compname2 = dirid.compname
is_equal = self.depth * depth2 < 0 or self.depth == depth2
if len(self.mother) * len(dirid.mother) > 0:
    is_equal = is_equal and self.mother == dirid.mother
return "Directory %s at level %s" % (self.name, self.depth)
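# Illustrative sketch (not part of the original file): in the equality test above a
# negative depth acts as a wildcard (self.depth*depth2 < 0), assuming the elided part
# of the comparison also checks the directory name. The name below is hypothetical.
any_depth = DirID("Tracking", -1)  # matches "Tracking" at any level
level_two = DirID("Tracking", 2)
print any_depth == level_two       # True: the depths have opposite sign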
""" Fetch the content of the single "directory" in the dqm."""
Thread.__init__(self)

"""An interface to the DQM document db. It is threaded to compensate for the
latency introduced by the finite response time of the server."""
def __init__(self, comm1, comm2, base1, base2, directory, depth=0, do_pngs=True, stat_test="KS", test_threshold=.5, black_list=[]):
    Thread.__init__(self)
self.name += "_%s" % directory.name
this_dir = DirID(self.directory.name, self.depth)
print "Skipping %s since blacklisted!" % this_dir

directory1 = self.base1 + "/" + self.directory.mother_dir + "/" + self.directory.name
directory2 = self.base2 + "/" + self.directory.mother_dir + "/" + self.directory.name
for fetcher in fetchers:
for fetcher in fetchers:
contents1 = fetchers[0].contents
contents2 = fetchers[1].contents
set1 = set(contents1.keys())
set2 = set(contents2.keys())
self_directory_directories = self.directory.subdirs
self_directory_comparisons = self.directory.comparisons
contents_names = list(set1.intersection(set2))

for name in contents_names:
    content = contents1[name]
    if "dir" in content["type"]:
        subdir = Directory(name, join(self.directory.mother_dir, self.directory.name))
        walkers.append(dirwalker)
        n_threads = activeCount()
    elif content["kind"] == "ROOT":
        join(self.directory.mother_dir, self.directory.name),
        literal2root(contents2[name]["obj_as_string"], content["type"]),
        self_directory_comparisons.append(comparison)

for walker in walkers:
    walker_directory = walker.directory
    if not walker_directory.is_empty():
        self_directory_directories.append(walker_directory)
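# Note (not part of the original file): the walker spawns one fetcher thread per server
# for the same remote directory, intersects the two content listings, recurses into the
# common subdirectories with further walker threads (throttled via activeCount()), and
# appends a Comparison for every common ROOT object; only non-empty subdirectories are
# kept in the resulting tree.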
""" Class acting as interface between the user and the harvested DQMRootFile.
It skips the directories created by the DQM infrastructure so as to provide an
interface as similar as possible to a real directory structure and to the
directory structure provided by the db interface.
"""
print "Directory %s does not exist: skipping. Is this a custom rootfile?" % dqmdatadir

is_null = not directory
print >> stderr, "Directory %s does not exist!" % name
def ls(self, directory_name=""):
if len(directory_name) == 0:
directory = self.rootfilepwd.GetDirectory(directory_name)
if self.__is_null(directory, directory_name):
for key in directory.GetListOfKeys():
    contents[key.GetName()] = key.GetClassName()

def cd(self, directory_name):
    """Change the current TDirectoryFile. The familiar "-" and ".." directories
    can be accessed as well."""
if directory_name == "-":
if directory_name == "..":
mom = self.rootfilepwd.GetMotherDir()
if "Run " not in self.rootfilepwd.GetName():
    self.rootfilepwd.Delete()
new_directory = self.rootfilepwd.GetDirectory(directory_name)
if not self.__is_null(new_directory, directory_name):

"""Get a TObject from the rootfile."""
obj = self.rootfilepwd.Get(objname)
def __init__(self, name, topdirname, rootfilename1, rootfilename2, run=-1, black_list=[], stat_test="KS", test_threshold=.5, draw_success=True, do_pngs=False, black_list_histos=[]):
self.stat_test = Statistical_Tests[stat_test](test_threshold)
self.directory.draw_success = draw_success
self.directory.do_pngs = do_pngs
def cd(self, directory_name, on_disk=False, regexp=False):
if len(directory_name) != 0:
    if not exists(directory_name):
        makedirs(directory_name)
    chdir(directory_name)
tmp = self.dqmrootfile2.ls().keys()
self.dqmrootfile2.cd(next_dir)
tmp = self.dqmrootfile1.ls().keys()
self.dqmrootfile1.cd(next_dir)
if len(directory_name) != 0:
    if not exists(directory_name):
        makedirs(directory_name)
    chdir(directory_name)
self.dqmrootfile2.cd(directory_name)
self.dqmrootfile1.cd(directory_name)
def ls(self, directory_name=""):
    """Return the objects common to the 2 files."""
contents1 = self.dqmrootfile1.ls(directory_name)
contents2 = self.dqmrootfile2.ls(directory_name)
keys = [key for key in contents2.keys() if key in contents1]
if contents1[key] != contents2[key]:
diff_file1 = set(contents1.keys()) - set(contents2.keys())
diff_file2 = set(contents2.keys()) - set(contents1.keys())
for key1 in diff_file1:
    obj_type = contents1[key1]
    if obj_type == "TDirectoryFile":
    if obj_type[:2] != "TH" and obj_type[:3] != "TPr":
for key1 in diff_file2:
    obj_type = contents2[key1]
    if obj_type == "TDirectoryFile":
    if obj_type[:2] != "TH" and obj_type[:3] != "TPr":
contents[key] = contents1[key]

h1 = self.dqmrootfile1.getObj(name)
h2 = self.dqmrootfile2.getObj(name)
this_dir = DirID(dir_name, depth)
if this_dir in self.black_list:

n_top_contents = len(contents)
for name, obj_type in contents.items():
    if obj_type == "TDirectoryFile":
        print "Studying directory %s, %s/%s" % (name, cont_counter, n_top_contents)
        subdir = Directory(name)
        subdir.draw_success = directory.draw_success
        subdir.do_pngs = directory.do_pngs
        self.__fill_single_dir(name, subdir, join(mother_name, dir_name), depth)
        if not subdir.is_empty():
            print " ->Appending %s..." % name,
            directory.subdirs.append(subdir)
    if obj_type[:2] != "TH" and obj_type[:3] != "TPr":
    h1, h2 = self.getObjs(name)
    path = join(mother_name, dir_name, name)
    if path in self.black_list_histos:
        print " Skipping %s" % path
        directory.comparisons.append(Comparison(name,
                                                join(mother_name, dir_name),
                                                deepcopy(self.stat_test),
                                                draw_success=directory.draw_success,
                                                do_pngs=directory.do_pngs, skip=True))
    directory.comparisons.append(Comparison(name,
                                            join(mother_name, dir_name),
                                            deepcopy(self.stat_test),
                                            draw_success=directory.draw_success,
                                            do_pngs=directory.do_pngs, skip=False))

directory.filename1 = self.filename1
directory.filename2 = self.filename2
directory.different_histograms['file1'] = self.different_histograms['file1']
directory.different_histograms['file2'] = self.different_histograms['file2']
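# Note (not part of the original file): histogram paths found in black_list_histos
# still get a Comparison entry, but with skip=True, presumably so that they are listed
# in the report without being statistically tested or drawn; all other histograms get a
# regular Comparison with skip=False.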
first_run_dir = filter(lambda k: "Run " in k, self.ls().keys())[0]
print "\nRundir not there: Is this a generic rootfile?\n"
self.run = int(rundir.split(" ")[1])
print "Setting run number to 0"
rundir = "Run %s" % self.run
self.cd(rundir, False, True)
print "\nRundir not there: Is this a generic rootfile?\n"

n_left_threads = len(tcanvas_print_processes)
print "Waiting for %s threads to finish..." % n_left_threads
for p in tcanvas_print_processes:

Thread.__init__(self)
black_list_str = black_list_str.replace("__", " ")
if len(black_list_str) > 0:
    for ele in black_list_str.split(","):
        dirname, level = ele.split("@")
        if "/" not in dirname:
            dirid = DirID(dirname, level)
        else:
            mother, daughter = dirname.split("/")
            dirid = DirID(daughter, level, mother)
        if not dirid in black_list:
            black_list.append(dirid)
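# Illustrative sketch (not part of the original file): in the blacklist string double
# underscores stand for blanks, commas separate entries and "@" separates the directory
# name from its level; "mother/daughter" restricts the match to a given mother
# directory. The names below are hypothetical.
black_list = string2blacklist("Tracking@1,HLT/Muons@-1")
# -> a list with one DirID per entry: "Tracking" at level 1 and "Muons" under "HLT".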
def get_datasets_list(self, dataset_string="")
def __init__(self, name, depth, mother="")
def __fill_single_dir(self, dir_name, directory, mother_name="", depth=0)
def literal2root(literal, rootType, debug=False)
def ls(self, url='', fetch_root=False)
def __init__(self, comm1, comm2, base1, base2, directory, depth=0, do_pngs=True, stat_test="KS", test_threshold=.5, black_list=[])
def get_data(self, full_url)
def __init__(self, comm, directory)
def get_root_objects(self, url="")
def __init__(self, name, topdirname, rootfilename1, rootfilename2, run=-1, black_list=[], stat_test="KS", test_threshold=.5, draw_success=True, do_pngs=False, black_list_histos=[])
def get_root_objects_names_list_recursive(self, url="", present_url="")
def get_common_runs(self, dataset_string1, dataset_string2)
def get_dataset_runs(self, dataset_string)
def get_root_objects_recursive(self, url="", present_url="")
def cd(self, directory_name, on_disk=False, regexp=False)
def __is_null(self, directory, name)
def ls(self, directory_name="")
def ls(self, directory_name="")
def getObj(self, objname)
def cd(self, directory_name)
def get_root_objects_list_recursive(self, url="")
def get_runs_list(self, dataset_string)
def __init__(self, server, is_private=False, ident="DQMToJson/1.0 python/%d.%d.%d"%version_info[:3])
def __init__(self, walker)
def get_root_objects_list(self, url="")
def get_RelVal_CMSSW_versions(self, query)
def get_samples(self, samples_string="*")
def __init__(self, rootfilename)
def string2blacklist(black_list_str)