edmIntegrityCheck.py
#!/usr/bin/env python

from PhysicsTools.HeppyCore.utils.edmIntegrityCheck import PublishToFileSystem, IntegrityCheck
import das

import copy
import os
import sys
if __name__ == '__main__':

    from optparse import OptionParser, OptionGroup

    usage = """usage: %prog [options] /Sample/Name/On/Castor

e.g.: %prog -u wreece -p -w 'PFAOD_*.root' /MultiJet/Run2011A-05Aug2011-v1/AOD/V2
"""
    dasParser = das.DASOptionParser(usage=usage)
    group = OptionGroup(dasParser.parser, 'edmIntegrityCheck Options',
                        'Options related to checking files on CASTOR')

    group.add_option("-d", "--device", dest="device", default='cmst3',
                     help="The storage device to write to, e.g. 'cmst3'")
    group.add_option("-n", "--name", dest="name", default=None,
                     help='The name of the dataset in DAS. Will be guessed if not specified')
    group.add_option("-p", "--printout", dest="printout", default=False, action='store_true',
                     help='Print a report to stdout')
    group.add_option("-r", "--recursive", dest="recursive", default=False, action='store_true',
                     help='Walk the mass storage device recursively')
    group.add_option("-u", "--user", dest="user", default=os.environ['USER'],
                     help='The username to use when looking at mass storage devices')
    group.add_option("-w", "--wildcard", dest="wildcard", default=None,
                     help='A UNIX-style wildcard to specify which files to check')
    group.add_option("--update", dest="update", default=False, action='store_true',
                     help='Only update the status of corrupted files')
    group.add_option("-t", "--timeout", dest="timeout", default=-1, type=int,
                     help='Set a timeout on the edmFileUtil calls')
    group.add_option("--min-run", dest="min_run", default=-1, type=int,
                     help='When querying DBS, require runs >= this run')
    group.add_option("--max-run", dest="max_run", default=-1, type=int,
                     help='When querying DBS, require runs <= this run')
    group.add_option("--max_threads", dest="max_threads", default=None,
                     help='The maximum number of threads to use')
    dasParser.parser.add_option_group(group)
    (opts, datasets) = dasParser.get_opt()

    if len(datasets) == 0:
        dasParser.parser.print_help()
        print()
        print('need to provide a dataset as an argument')
        sys.exit(1)

    def work(d, op):
        # A dataset may be given as 'user%/data/set/name' to override the
        # --user option for that dataset only.
        tokens = d.split('%')
        if len(tokens) == 2:
            op.user = tokens[0]
            d = tokens[1]

        check = IntegrityCheck(d, op)
        pub = PublishToFileSystem(check)

        # In update mode, start from the previously published report so that
        # only the status of corrupted files is refreshed.
        previous = None
        if op.update:
            previous = pub.get(check.directory)

        check.test(previous=previous, timeout=op.timeout)
        if op.printout:
            check.report()
        report = check.structured()
        pub.publish(report)

        return d

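    # A hedged illustration (hypothetical input) of the 'user%dataset'
    # convention handled in work() above, reusing the dataset from the
    # usage string:
    #
    #   d = 'wreece%/MultiJet/Run2011A-05Aug2011-v1/AOD/V2'
    #   d.split('%')  ->  ['wreece', '/MultiJet/Run2011A-05Aug2011-v1/AOD/V2']
    #
    # so op.user becomes 'wreece' and the check runs on the remaining path.
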
    def callback(result):
        print('Checking thread done:', str(result))

    # Submit the main work, in parallel if several datasets were given.
    if len(datasets) == 1:
        d = datasets[0]
        work(d, copy.deepcopy(opts))
    else:
        import multiprocessing
        # --max_threads arrives as a string; convert it, or leave it as None
        # to let multiprocessing choose the pool size.
        if opts.max_threads:
            opts.max_threads = int(opts.max_threads)
        pool = multiprocessing.Pool(processes=opts.max_threads)

        for d in datasets:
            pool.apply_async(work, args=(d, copy.deepcopy(opts)), callback=callback)
        pool.close()
        pool.join()
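
For reference, the same check can be driven from Python without the command line. The sketch below is hypothetical: it uses only the IntegrityCheck / PublishToFileSystem calls that work() above makes, and the Opts class is a stand-in for the parsed options (the real classes may read further attributes, e.g. name, device, min_run, max_run, that this script also defines).

from PhysicsTools.HeppyCore.utils.edmIntegrityCheck import PublishToFileSystem, IntegrityCheck

class Opts(object):
    # Hypothetical stand-in for the optparse result; only the attributes
    # read in work() above are listed here.
    user = 'wreece'
    wildcard = 'PFAOD_*.root'
    timeout = -1
    update = False
    printout = True

opts = Opts()
check = IntegrityCheck('/MultiJet/Run2011A-05Aug2011-v1/AOD/V2', opts)
check.test(previous=None, timeout=opts.timeout)
if opts.printout:
    check.report()
PublishToFileSystem(check).publish(check.structured())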