indexGenCompare.py
#!/usr/bin/env python
# Builds a comparison web page between two validation benchmarks and
# optionally submits it to the benchmark web site.

import shutil, sys, os, re, valtools, string

from string import Template

from optparse import OptionParser
from subprocess import Popen,PIPE


def processBenchmark( path, outputRootFile ):
    """Decode a benchmark path, fetch its ROOT file from the web site
    and copy it locally to outputRootFile."""
    (release, bname, extension ) = valtools.decodePath( path )
    if bname != webpage.benchmarkName_:
        print "sorry, you have to go to the",bname,"directory to produce this comparison. Note that you cannot compare different benchmarks."
        sys.exit(4)
    benchmark = valtools.benchmark( extension )
    benchmark.release_ = release
    print benchmark.benchmarkUrl( website )
    root = benchmark.rootFileOnWebSite( website )
    shutil.copy(root, outputRootFile)
    print 'retrieved ', root
    return benchmark


webpage = valtools.webpage()
webpage.parser_.usage = "example: %prog CMSSW_3_1_0_pre7/TauBenchmarkGeneric_ZTT_FastSim_IDEAL CMSSW_3_1_0_pre7/TauBenchmarkGeneric_TEST\nThe list of benchmarks can be obtained using the listBenchmarks.py command."
webpage.parser_.add_option("-m", "--macro", dest="macro",
                           help="specify the ROOT macro to be used for comparison. If empty, skip the plotting stage", default="compare.C")
webpage.parser_.add_option("-s", "--submit", dest="submit",
                           action="store_true",
                           help="submit the comparison to the web site",
                           default=False)
webpage.parser_.add_option("-S", "--submit-force", dest="submitForce",
                           action="store_true",
                           help="force the submission of the comparison to the web site",
                           default=False)
webpage.parseArgs()

if len(webpage.args_)!=2:
    webpage.parser_.print_help()
    sys.exit(1)


website = valtools.website()

macro = webpage.options_.macro
templateFile = 'indexCompare.html'
indexhtml = "%s/%s" % (webpage.templates_,templateFile)

# setting up benchmarks
print
benchmark1 = processBenchmark( webpage.args_[0],
                               'benchmark_0.root' )
print
benchmark2 = processBenchmark( webpage.args_[1],
                               'benchmark_1.root' )


webpage.setOutputDir(benchmark2.fullName())

# do the plots
if webpage.options_.macro != "":
    os.system('root -b ' + macro)

valtools.testFileType(indexhtml, ".html")
infonotfoundhtml = "%s/%s" % (webpage.templates_,"infoNotFound.html")
valtools.testFileType(infonotfoundhtml, ".html")

images = webpage.readCaptions('c_captions.txt')

title = webpage.benchmarkName_

benchmark1Link = benchmark1.benchmarkUrl( website )
benchmark1Name = benchmark1.fullName()

benchmark2Link = benchmark2.benchmarkUrl( website )
benchmark2Name = benchmark2.fullName()

macroLink = valtools.processFile( macro, webpage.outputDir_ )
macroName = os.path.basename(macro)

comments = webpage.options_.comments
username = os.environ['USER']

# fill the index.html template and write it to the output directory
ifile = open( indexhtml )
indexTemplate = ifile.read()
s = Template(indexTemplate)
subst = s.substitute(title = title,
                     benchmark1Link = benchmark1Link,
                     benchmark1Name = benchmark1Name,
                     benchmark2Link = benchmark2Link,
                     benchmark2Name = benchmark2Name,
                     macroLink = macroLink,
                     macroName = macroName,
                     comments = comments,
                     images = images,
                     username = username,
                     date = webpage.date_
                     )
ofile = open( '%s/index.html' % webpage.outputDir_, 'w' )
ofile.write( subst )
ofile.close()

ifile = open( infonotfoundhtml )
infoNotFoundTemplate = ifile.read()
s2 = Template(infoNotFoundTemplate)
subst2 = s2.substitute( username = os.environ['USER'] )
ofile2 = open( '%s/infoNotFound.html' % webpage.outputDir_, 'w' )
ofile2.write( subst2 )
ofile2.close()

# if submission is forced, it means that the user does want
# to submit.
if webpage.options_.submitForce:
    webpage.options_.submit = True

if (webpage.options_.submit == True):
    remoteName = benchmark1.benchmarkOnWebSite(website) + '/' + webpage.outputDir_
    comparison = valtools.comparison( benchmark1, webpage.outputDir_)
    comparison.submit( website,
                       webpage.options_.submitForce)
    benchmark1.addLinkToComparison( website, comparison )
    benchmark2.addLinkToComparison( website, comparison )
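The index.html page is produced with Python's string.Template: the $-placeholders in the template file are replaced by the keyword arguments passed to substitute(). A minimal sketch of that mechanism, in the same Python 2 style as the script; the two-line template text and the values below are made up for illustration and stand in for the real indexCompare.html and its benchmark links, captions, comments, user name and date:

# illustrative stand-in for the template read from indexCompare.html
from string import Template

indexTemplate = "<h1>$title</h1>\n<p>by $username on $date</p>\n"
s = Template(indexTemplate)
# every $-placeholder in the template must be given a value,
# otherwise substitute() raises a KeyError
subst = s.substitute(title='TauBenchmarkGeneric',
                     username='someuser',
                     date='01 June 2009')
print subst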
Definitions of the valtools helpers used above:
    decodePath      valtools.py:274
    processFile     valtools.py:304
    testFileType    valtools.py:286
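A typical invocation, taken from the usage string defined in the script (the release and benchmark directory names are just the examples given there):

    indexGenCompare.py CMSSW_3_1_0_pre7/TauBenchmarkGeneric_ZTT_FastSim_IDEAL CMSSW_3_1_0_pre7/TauBenchmarkGeneric_TEST

The -m option selects the ROOT comparison macro (compare.C by default; an empty value skips the plotting stage), -s submits the resulting comparison to the web site, and -S forces the submission. The list of available benchmarks can be obtained with the listBenchmarks.py command.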