mps_alisetup.py
1 #!/usr/bin/env python
2 
3 from __future__ import print_function
4 import os
5 import re
6 import sys
7 if sys.version_info[0]>2:
8  import _pickle as cPickle
9 else:
10  import cPickle
11 import argparse
12 import itertools
13 import subprocess
14 import collections
15 import configparser as ConfigParser
16 import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
17 import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib
18 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.iniparser as mpsv_iniparser
19 import Alignment.MillePedeAlignmentAlgorithm.mpsvalidate.trackerTree as mpsv_trackerTree
20 from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
21 from functools import reduce
22 
23 import six
24 
25 
26 def main(argv = None):
27  """Main routine. Not called if this module is loaded via `import`.
28 
29  Arguments:
30  - `argv`: Command line arguments passed to the script.
31  """
32 
33  if argv is None:
34  argv = sys.argv[1:]
35 
36  setup_alignment = SetupAlignment(argv)
37  setup_alignment.setup()
38 
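# Illustrative command-line usage (annotation, not part of the original file; the
# .ini file name is a placeholder). The options correspond to the argparse setup in
# SetupAlignment._create_config() below:
#
#   mps_alisetup.py alignment_config.ini       # set up mille and pede jobs
#   mps_alisetup.py -w alignment_config.ini    # only create merge job(s) with new weights
#   mps_alisetup.py -v alignment_config.ini    # same setup, with verbose mps_setup output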
39 
40 
41 class SetupAlignment(object):
42  """Class encapsulating the alignment campaign setup procedure."""
43 
44  def __init__(self, argv):
45  """Constructor
46 
47  Arguments:
48  - `argv`: command line arguments
49  """
50 
51  self._argv = argv # raw command line arguments
52  self._args = None # parsed command line arguments
53  self._config = None # ConfigParser object
54  self._mss_dir = None # mass storage directory
55  self._datasets = collections.OrderedDict() # final dataset configs
56  self._first_run = None # first run for start geometry
57  self._cms_process = None # cms.Process extracted from CMSSW config
58  self._override_gt = None # snippet to append to config
59  self._pede_script = None # path to pede batch script template
60  self._weight_dict = collections.OrderedDict() # dictionary with dataset weights
61  self._mille_script = None # path to mille batch script template
62  self._mps_dir_name = None # MP campaign name (mp<ID>)
63  self._common_weights = {} # dictionary with dataset weights from [weights] section
64  self._weight_configs = [] # list with combinations of dataset weights
65  self._general_options = {} # general options extracted from ini file
66  self._external_datasets = collections.OrderedDict() # external dataset configs
67  self._first_pede_config = True # does a pede job exist already?
68 
69  self._create_config()
70  self._fill_general_options()
71  self._fetch_datasets()
72  self._construct_paths()
73  self._create_mass_storage_directory()
74  self._fetch_pede_settings()
75  self._create_weight_configs()
76 
77 
78  def setup(self):
79  """Setup the alignment campaign."""
80 
81  if self._args.weight:
82  self._create_additional_pede_jobs()
83  else:
84  self._create_mille_jobs()
85  self._create_pede_jobs()
86 
87  if self._override_gt.strip() != "":
88  msg = ("Overriding global tag with single-IOV tags extracted from "
89  "'{}' for run number '{}'.".format(self._global_tag,
90  self._first_run))
91  print(msg)
92  print("-"*75)
93  print(self._override_gt)
94  print("="*75)
95 
96 
97  def _create_config(self):
98  """Create ConfigParser object from command line arguments."""
99 
100  helpEpilog ="""Builds the config-templates from a universal
101  config-template for each dataset specified in the .ini-file that is passed
102  to this script. Then calls mps_setup.py for all datasets."""
103  parser = argparse.ArgumentParser(
104  description = ("Setup the alignment as configured in the "
105  "alignment_config file."),
106  epilog = helpEpilog)
107  parser.add_argument("-v", "--verbose", action="store_true",
108  help="display detailed output of mps_setup")
109  parser.add_argument("-w", "--weight", action="store_true",
110  help=("creates additional merge job(s) with "
111  "(possibly new) weights from .ini-config"))
112  parser.add_argument("alignmentConfig",
113  help=("name of the .ini config file that specifies "
114  "the datasets to be used"))
115 
116  self._args = parser.parse_args(self._argv)
117  self._config = ConfigParser.ConfigParser()
118  self._config.optionxform = str # default would give lowercase options
119  # -> not wanted
120  self._config.read(self._args.alignmentConfig)
121  self._config.config_path = self._args.alignmentConfig
122 
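# Sketch of an alignment_config .ini file as read above (annotation; the option names
# are the ones queried elsewhere in this class, all values are placeholders):
#
#   [general]
#   classInf                 = <queue/class information passed to mps_setup.py>
#   pedeMem                  = <pede memory in MB>
#   jobname                  = <job name>
#   FirstRunForStartGeometry = <run number>
#   globaltag                = <default global tag>
#   configTemplate           = <default cmsRun config template>
#   datasetdir               = <directory containing the input file lists>
#
#   [dataset:myDataset]
#   collection    = <ALCARECO collection name>
#   inputFileList = ${datasetdir}/<file list>.txt
#   globaltag     = <overrides the [general] default>
#   weight        = <weight or comma-separated list of weights>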
123 
124  def _construct_paths(self):
125  """Determine directory paths and create the ones that are needed."""
126 
127  mpsTemplates = os.path.join("src", "Alignment",
128  "MillePedeAlignmentAlgorithm", "templates")
129  if checked_out_MPS()[0]:
130  mpsTemplates = os.path.join(os.environ["CMSSW_BASE"], mpsTemplates)
131  else:
132  mpsTemplates = os.path.join(os.environ["CMSSW_RELEASE_BASE"], mpsTemplates)
133  self._mille_script = os.path.join(mpsTemplates, "mps_runMille_template.sh")
134  self._pede_script = os.path.join(mpsTemplates, "mps_runPede_rfcp_template.sh")
135 
136  # get working directory name
137  currentDir = os.getcwd()
138  match = re.search(re.compile('mpproduction\/mp(.+?)$', re.M|re.I),currentDir)
139  if match:
140  self._mps_dir_name = 'mp'+match.group(1)
141  else:
142  print("Current location does not seem to be a MillePede campaign directory:", end=' ')
143  print(currentDir)
144  sys.exit(1)
145 
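# Annotation: the regular expression above requires the current working directory to
# be a campaign directory of the form '.../MPproduction/mp<ID>'; e.g. a hypothetical
# '/afs/.../MPproduction/mp1234' yields self._mps_dir_name = 'mp1234', while any other
# location aborts the setup.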
146 
147  def _fill_general_options(self):
148  """Create and fill `general_options` dictionary."""
149 
150  print("="*75)
151  self._fetch_dataset_directory()
152  self._fetch_external_datasets()
153  self._fetch_essentials()
154  self._fetch_defaults()
155 
156 
157  def _fetch_external_datasets(self):
158  """Fetch information about external datasets."""
159 
160  if self._config.has_option("general", "externalDatasets"):
161  datasets = map(lambda x: x.strip(),
162  self._config.get("general",
163  "externalDatasets").split(","))
164  datasets = [x for x in datasets if len(x.strip()) > 0]
165  for item in datasets:
166  splitted = item.split("|")
167  dataset = splitted[0].strip()
168  dataset = os.path.expandvars(dataset)
169 
170  weight = splitted[1] if len(splitted) > 1 else None
171  config = ConfigParser.ConfigParser()
172  config.optionxform = str
173  config.read(dataset)
174  config.config_path = dataset
175  self._external_datasets[dataset] = {"config": config,
176  "weight": weight}
177 
178 
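# Annotation: illustrative [general] option consumed by the loop above (file names are
# placeholders). Each comma-separated entry is the path of another .ini file, optionally
# followed by '| <weight>' which is later applied to all datasets defined in that file:
#
#   externalDatasets = ${datasetdir}/cosmics.ini, ${datasetdir}/collisions.ini | 2.0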
179 
180  def _create_mass_storage_directory(self):
181  """
182  Create MPS mass storage directory where, e.g., mille binaries are
183  stored.
184  """
185 
186  # set directory on eos
187  self._mss_dir = self._general_options.get("massStorageDir",
188  "/eos/cms/store/group/alca_millepede/")
189  self._mss_dir = os.path.join(self._mss_dir, "MPproductionFiles",
190  self._mps_dir_name)
191 
192  cmd = ["mkdir", "-p", self._mss_dir]
193 
194  # create directory
195  if not self._general_options.get("testMode", False):
196  try:
197  with open(os.devnull, "w") as dump:
198  subprocess.check_call(cmd, stdout = dump, stderr = dump)
199  except subprocess.CalledProcessError:
200  print("Failed to create mass storage directory:", self._mss_dir)
201  sys.exit(1)
202 
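# Annotation: with the default massStorageDir and a campaign named e.g. 'mp1234'
# (placeholder), the mkdir call above creates
# '/eos/cms/store/group/alca_millepede/MPproductionFiles/mp1234', unless testMode is set.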
203 
204  def _create_weight_configs(self):
205  """Extract different weight configurations from `self._config`."""
206 
207  weights_list = [[(name, weight) for weight in self._weight_dict[name]]
208  for name in self._weight_dict]
209 
210  common_weights_list = [[(name, weight)
211  for weight in self._common_weights[name]]
212  for name in self._common_weights]
213 
214  common_weights_dicts = []
215  for item in itertools.product(*common_weights_list):
216  d = {}
217  for name,weight in item:
218  d[name] = weight
219  common_weights_dicts.append(d)
220 
221  weight_configs = []
222  for weight_conf in itertools.product(*weights_list):
223  number_of_configs = len(weight_configs)
224  for common_weight in common_weights_dicts:
225  replaced_config \
226  = tuple([(dataset[0],
227  reduce(lambda x,y: mps_tools.replace_factors(x, y, common_weight[y]),
228  common_weight, dataset[1]))
229  for dataset in weight_conf])
230  if replaced_config not in weight_configs:
231  weight_configs.append(replaced_config)
232 
233  # default if config contains no common weights:
234  if len(weight_configs) == number_of_configs:
235  weight_configs.append(weight_conf)
236 
237  for weight_config in weight_configs:
238  resolved_weight_config \
239  = [(dataset[0], mps_tools.compute_product_string(dataset[1]))
240  for dataset in weight_config]
241  self._weight_configs.append(resolved_weight_config)
242 
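# Minimal sketch of the expansion above (annotation; assumes mps_tools.replace_factors
# substitutes a named common weight by one of its values and compute_product_string
# evaluates the resulting product string -- names and numbers are illustrative):
#
#   self._weight_dict    = {"dsA": ["common*0.5"], "dsB": ["1.0"]}
#   self._common_weights = {"common": ["1.0", "2.0"]}
#
#   -> self._weight_configs = [[("dsA", "0.5"), ("dsB", "1.0")],
#                              [("dsA", "1.0"), ("dsB", "1.0")]]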
243 
244  def _fetch_pede_settings(self):
245  """Fetch 'pedesettings' from general section in `self._config`."""
246 
247  self._pede_settings \
248  = ([x.strip()
249  for x in self._config.get("general", "pedesettings").split(",")]
250  if self._config.has_option("general", "pedesettings") else [None])
251 
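# Annotation: illustrative [general] option for the parsing above (file names are
# placeholders); one pede merge job is created per listed settings file, or a single
# one with default settings if the option is absent:
#
#   pedesettings = <settings file 1>, <settings file 2>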
252 
253  def _create_mille_jobs(self):
254  """Create the mille jobs based on the [dataset:<name>] sections."""
255 
256  gt_regex = re.compile('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']')
257  sg_regex = re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M)
258  collection_regex = re.compile('setupCollection\s*\=\s*[\"\'](.*?)[\"\']')
259  czt_regex = re.compile('setupCosmicsZeroTesla\s*\=\s*.*$', re.M)
260  cdm_regex = re.compile('setupCosmicsDecoMode\s*\=\s*.*$', re.M)
261  pw_regex = re.compile('setupPrimaryWidth\s*\=\s*.*$', re.M)
262  json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)
263 
264  first_dataset = True
265  for name, dataset in six.iteritems(self._datasets):
266  print("="*75)
267  # Build config from template/Fill in variables
268  try:
269  with open(dataset["configTemplate"],"r") as f:
270  tmpFile = f.read()
271  except IOError:
272  print("The config-template called", end=' ')
273  print(dataset["configTemplate"], "cannot be found.")
274  sys.exit(1)
275 
276  tmpFile = re.sub(gt_regex,
277  'setupGlobaltag = \"'+dataset["globaltag"]+'\"',
278  tmpFile)
279  tmpFile = re.sub(sg_regex,
280  "setupRunStartGeometry = "+
281  self._general_options["FirstRunForStartGeometry"], tmpFile)
282  tmpFile = re.sub(collection_regex,
283  'setupCollection = \"'+dataset["collection"]+'\"',
284  tmpFile)
285  if "ALCARECOTkAlCosmics" in dataset["collection"]:
286  if dataset['cosmicsZeroTesla']:
287  tmpFile = re.sub(czt_regex,
288  'setupCosmicsZeroTesla = True',
289  tmpFile)
290  else:
291  tmpFile = re.sub(czt_regex,
292  'setupCosmicsZeroTesla = False',
293  tmpFile)
294 
295  if dataset['cosmicsDecoMode']:
296  tmpFile = re.sub(cdm_regex,
297  'setupCosmicsDecoMode = True',
298  tmpFile)
299  else:
300  tmpFile = re.sub(cdm_regex,
301  'setupCosmicsDecoMode = False',
302  tmpFile)
303 
304  if dataset['primaryWidth'] > 0.0:
305  tmpFile = re.sub(pw_regex,
306  'setupPrimaryWidth = '+str(dataset["primaryWidth"]),
307  tmpFile)
308  if dataset['json'] != '':
309  tmpFile = re.sub(json_regex,
310  'setupJson = \"'+dataset["json"]+'\"',
311  tmpFile)
312 
313  thisCfgTemplate = "tmp.py"
314  with open(thisCfgTemplate, "w") as f:
315  f.write(tmpFile)
316 
317 
318  # Set mps_setup append option for datasets following the first one
319  append = "-a"
320  if first_dataset:
321  append = ""
322  first_dataset = False
323  self._config_template = tmpFile
324  self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
325  self._create_input_db()
326 
327  with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)
328 
329 
330  # create mps_setup command
331  command = ["mps_setup.py",
332  "-m",
333  append,
334  "-M", self._general_options["pedeMem"],
335  "-N", name,
336  self._mille_script,
337  thisCfgTemplate,
338  dataset["inputFileList"],
339  str(dataset["njobs"]),
340  self._general_options["classInf"],
341  self._general_options["jobname"],
342  self._pede_script,
343  "cmscafuser:"+self._mss_dir]
344  if dataset["numberOfEvents"] > 0:
345  command.extend(["--max-events", str(dataset["numberOfEvents"])])
346  command = [x for x in command if len(x.strip()) > 0]
347 
348  # Some output:
349  print("Creating jobs for dataset:", name)
350  print("-"*75)
351  print("Baseconfig: ", dataset["configTemplate"])
352  print("Collection: ", dataset["collection"])
353  if "ALCARECOTkAlCosmics" in dataset["collection"]:
354  print("cosmicsDecoMode: ", dataset["cosmicsDecoMode"])
355  print("cosmicsZeroTesla: ", dataset["cosmicsZeroTesla"])
356  print("Globaltag: ", dataset["globaltag"])
357  print("Number of jobs: ", dataset["njobs"])
358  print("Inputfilelist: ", dataset["inputFileList"])
359  if dataset["json"] != "":
360  print("Jsonfile: ", dataset["json"])
361  if self._args.verbose:
362  print("Pass to mps_setup: ", " ".join(command))
363 
364  # call the command and toggle verbose output
365  self._handle_process_call(command, self._args.verbose)
366 
367  # remove temporary file
368  self._handle_process_call(["rm", thisCfgTemplate])
369 
370 
371  def _create_pede_jobs(self):
372  """Create pede jobs from the given input."""
373 
374  for setting in self._pede_settings:
375  print()
376  print("="*75)
377  if setting is None:
378  print("Creating pede job{}.".format(
379  "s" if len(self._pede_settings)*len(self._weight_configs) > 1 else ""))
380  print("-"*75)
381  else:
382  print("Creating pede jobs using settings from '{0}'.".format(setting))
383  for weight_conf in self._weight_configs:
384  # blank weights
385  self._handle_process_call(["mps_weight.pl", "-c"])
386 
387  thisCfgTemplate = "tmp.py"
388  with open(thisCfgTemplate, "w") as f: f.write(self._config_template)
389  if self._override_gt is None:
390  self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
391  self._create_input_db()
392  with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)
393 
394  for name,weight in weight_conf:
395  self._handle_process_call(["mps_weight.pl", "-N", name, weight], True)
396 
397  if not self._first_pede_config:
398  # create new mergejob
399  self._handle_process_call(["mps_setupm.pl"], self._args.verbose)
400 
401  # read mps.db to find directory of new mergejob
402  lib = mpslib.jobdatabase()
403  lib.read_db()
404 
405  # short cut for jobm path
406  jobm_path = os.path.join("jobData", lib.JOBDIR[-1])
407 
408  # delete old merge-config
409  command = ["rm", "-f", os.path.join(jobm_path, "alignment_merge.py")]
410  self._handle_process_call(command, self._args.verbose)
411 
412  # create new merge-config
413  command = [
414  "mps_merge.py",
415  "-w", thisCfgTemplate,
416  os.path.join(jobm_path, "alignment_merge.py"),
417  jobm_path,
418  str(lib.nJobs),
419  ]
420  if setting is not None: command.extend(["-a", setting])
421  print("-"*75)
422  print(" ".join(command))
423  self._handle_process_call(command, self._args.verbose)
424  self._create_tracker_tree()
425  if self._first_pede_config:
426  os.symlink(self._tracker_tree_path,
427  os.path.abspath(os.path.join(jobm_path,
428  ".TrackerTree.root")))
429  self._first_pede_config = False
430 
431  # store weights configuration
432  with open(os.path.join(jobm_path, ".weights.pkl"), "wb") as f:
433  cPickle.dump(weight_conf, f, 2)
434  print("="*75)
435 
436  # remove temporary file
437  self._handle_process_call(["rm", thisCfgTemplate])
438 
439 
440  def _create_additional_pede_jobs(self):
441  """
442  Create pede jobs in addition to already existing ones. Return GT
443  override snippet.
444  """
445 
446  # do some basic checks
447  if not os.path.isdir("jobData"):
448  print("No jobData-folder found.", end=' ')
449  print("Properly set up the alignment before using the -w option.")
450  sys.exit(1)
451  if not os.path.exists("mps.db"):
452  print("No mps.db found.", end=' ')
453  print("Properly set up the alignment before using the -w option.")
454  sys.exit(1)
455 
456  firstDataset = next(six.itervalues(self._datasets))
457  config_template = firstDataset["configTemplate"]
458  collection = firstDataset["collection"]
459 
460  try:
461  with open(config_template,"r") as f:
462  tmpFile = f.read()
463  except IOError:
464  print("The config-template '"+config_template+"' cannot be found.")
465  sys.exit(1)
466 
467  tmpFile = re.sub('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']',
468  'setupGlobaltag = \"'+self._global_tag+'\"',
469  tmpFile)
470  tmpFile = re.sub('setupCollection\s*\=\s*[\"\'](.*?)[\"\']',
471  'setupCollection = \"'+collection+'\"',
472  tmpFile)
473  tmpFile = re.sub(re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M),
474  "setupRunStartGeometry = "+self._first_run,
475  tmpFile)
476  self._config_template = tmpFile
477 
478  # first pede job exists already in this mode:
479  self._first_pede_config = False
480  self._create_pede_jobs()
481 
482 
483  def _handle_process_call(self, command, verbose = False):
484  """
485  Wrapper around subprocess calls which treats output depending on verbosity
486  level.
487 
488  Arguments:
489  - `command`: list of command items
490  - `verbose`: flag to turn on verbosity
491  """
492 
493  call_method = subprocess.check_call if verbose else subprocess.check_output
494  try:
495  call_method(command, stderr=subprocess.STDOUT)
496  except subprocess.CalledProcessError as e:
497  print("" if verbose else e.output)
498  print("Failed to execute command:", " ".join(command))
499  sys.exit(1)
500 
501 
502  def _create_input_db(self):
503  """
504  Create sqlite file with single-IOV tags and use it to override the
505  GT. If the GT is already customized by the user, the customization has
506  higher priority. Creates a snippet to be appended to the configuration
507  file.
508  """
509 
510  run_number = int(self._first_run)
511  if not run_number > 0:
512  print("'FirstRunForStartGeometry' must be positive, but is", run_number)
513  sys.exit(1)
514 
515  input_db_name = os.path.abspath("alignment_input.db")
516  tags = mps_tools.create_single_iov_db(self._check_iov_definition(),
517  run_number, input_db_name)
518 
519  self._override_gt = ""
520  for record,tag in six.iteritems(tags):
521  if self._override_gt == "":
522  self._override_gt \
523  += ("\nimport "
524  "Alignment.MillePedeAlignmentAlgorithm.alignmentsetup."
525  "SetCondition as tagwriter\n")
526  self._override_gt += ("\ntagwriter.setCondition(process,\n"
527  " connect = \""+tag["connect"]+"\",\n"
528  " record = \""+record+"\",\n"
529  " tag = \""+tag["tag"]+"\")\n")
530 
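# Annotation: the snippet assembled above and appended to the job configuration looks
# like the following (the record, connect and tag strings are illustrative):
#
#   import Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.SetCondition as tagwriter
#
#   tagwriter.setCondition(process,
#          connect = "sqlite_file:<path>/alignment_input.db",
#          record = "TrackerAlignmentRcd",
#          tag = "<single-IOV tag name>")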
531 
532  def _check_iov_definition(self):
533  """
534  Check consistency of input alignment payloads and IOV definition.
535  Returns a dictionary with the information needed to override possibly
536  problematic input taken from the global tag.
537  """
538 
539  print("Checking consistency of IOV definition...")
540  iovs = mps_tools.make_unique_runranges(self._cms_process.AlignmentProducer)
541 
542  inputs = {
543  "TrackerAlignmentRcd": None,
544  "TrackerSurfaceDeformationRcd": None,
545  "TrackerAlignmentErrorExtendedRcd": None,
546  }
547 
548  for condition in self._cms_process.GlobalTag.toGet.value():
549  if condition.record.value() in inputs:
550  inputs[condition.record.value()] = {
551  "tag": condition.tag.value(),
552  "connect": ("pro"
553  if not condition.hasParameter("connect")
554  else condition.connect.value())
555  }
556 
557  inputs_from_gt = [record for record in inputs if inputs[record] is None]
558  inputs.update(
559  mps_tools.get_tags(self._cms_process.GlobalTag.globaltag.value(),
560  inputs_from_gt))
561 
562  if int(self._first_run) != iovs[0]: # simple consistency check
563  if iovs[0] == 1 and len(iovs) == 1:
564  print("Single IOV output detected in configuration and", end=' ')
565  print("'FirstRunForStartGeometry' is not 1.")
566  print("Creating single IOV output from input conditions in run", end=' ')
567  print(self._first_run+".")
568  for inp in inputs: inputs[inp]["problematic"] = True
569  else:
570  print("Value of 'FirstRunForStartGeometry' has to match first", end=' ')
571  print("defined output IOV:", end=' ')
572  print(self._first_run, "!=", iovs[0])
573  sys.exit(1)
574 
575  for inp in six.itervalues(inputs):
576  inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])
577 
578  # check consistency of input with output
579  problematic_gt_inputs = {}
580  input_indices = {key: len(value["iovs"]) -1
581  for key,value in six.iteritems(inputs)}
582  for iov in reversed(iovs):
583  for inp in inputs:
584  if inputs[inp].pop("problematic", False):
585  problematic_gt_inputs[inp] = inputs[inp]
586  if inp in problematic_gt_inputs: continue
587  if input_indices[inp] < 0:
588  print("First output IOV boundary at run", iov, end=' ')
589  print("is before the first input IOV boundary at", end=' ')
590  print(inputs[inp]["iovs"][0], "for '"+inp+"'.")
591  print("Please check your run range selection.")
592  sys.exit(1)
593  input_iov = inputs[inp]["iovs"][input_indices[inp]]
594  if iov < input_iov:
595  if inp in inputs_from_gt:
596  problematic_gt_inputs[inp] = inputs[inp]
597  print("Found problematic input taken from global tag.")
598  print("Input IOV boundary at run",input_iov, end=' ')
599  print("for '"+inp+"' is within output IOV starting", end=' ')
600  print("with run", str(iov)+".")
601  print("Deriving an alignment with coarse IOV", end=' ')
602  print("granularity starting from finer granularity", end=' ')
603  print("leads to wrong results.")
604  print("A single IOV input using the IOV of", end=' ')
605  print("'FirstRunForStartGeometry' ("+self._first_run+")", end=' ')
606  print("is automatically created and used.")
607  continue
608  print("Found input IOV boundary at run",input_iov, end=' ')
609  print("for '"+inp+"' which is within output IOV starting", end=' ')
610  print("with run", str(iov)+".")
611  print("Deriving an alignment with coarse IOV granularity", end=' ')
612  print("starting from finer granularity leads to wrong", end=' ')
613  print("results.")
614  print("Please check your run range selection.")
615  sys.exit(1)
616  elif iov == input_iov:
617  input_indices[inp] -= 1
618 
619  # check consistency of 'TrackerAlignmentRcd' with other inputs
620  input_indices = {key: len(value["iovs"]) -1
621  for key,value in six.iteritems(inputs)
622  if (key != "TrackerAlignmentRcd")
623  and (key not in problematic_gt_inputs)}
624  for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
625  for inp in input_indices:
626  input_iov = inputs[inp]["iovs"][input_indices[inp]]
627  if iov < input_iov:
628  print("Found input IOV boundary at run",input_iov, end=' ')
629  print("for '"+inp+"' which is within 'TrackerAlignmentRcd'", end=' ')
630  print("IOV starting with run", str(iov)+".")
631  print("Deriving an alignment with inconsistent IOV boundaries", end=' ')
632  print("leads to wrong results.")
633  print("Please check your input IOVs.")
634  sys.exit(1)
635  elif iov == input_iov:
636  input_indices[inp] -= 1
637 
638  print(" -> IOV consistency check successful.")
639  print("="*75)
640 
641  return problematic_gt_inputs
642 
643 
644  def _create_tracker_tree(self):
645  """Method to create hidden 'TrackerTree.root'."""
646 
647  if self._global_tag is None or self._first_run is None:
648  print("Trying to create the tracker tree before setting the global", end=' ')
649  print("tag or the run to determine the geometry IOV.")
650  sys.exit(1)
651 
652  config = mpsv_iniparser.ConfigData()
653  config.jobDataPath = "." # current directory
654  config.globalTag = self._global_tag
655  config.firstRun = self._first_run
656  self._tracker_tree_path = mpsv_trackerTree.check(config)
657 
658 
659  def _fetch_essentials(self):
660  """Fetch general options from config file."""
661 
662  for var in ("classInf","pedeMem","jobname", "FirstRunForStartGeometry"):
663  try:
664  self._general_options[var] = self._config.get('general',var)
665  except ConfigParser.NoOptionError:
666  print("No", var, "found in [general] section.", end=' ')
667  print("Please check ini-file.")
668  sys.exit(1)
669  self._first_run = self._general_options["FirstRunForStartGeometry"]
670 
671 
672  def _fetch_defaults(self):
673  """Fetch default general options from config file."""
674 
675  for var in ("globaltag", "configTemplate", "json", "massStorageDir",
676  "testMode"):
677  try:
678  self._general_options[var] = self._config.get("general", var)
679  except ConfigParser.NoOptionError:
680  if var == "testMode": continue
681  print("No '" + var + "' given in [general] section.")
682 
683  for dataset in six.itervalues(self._external_datasets):
684  dataset["general"] = {}
685  for var in ("globaltag", "configTemplate", "json"):
686  try:
687  dataset["general"][var] = dataset["config"].get("general", var)
688  except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
689  pass
690 
691 
692  def _fetch_dataset_directory(self):
693  """
694  Fetch 'datasetDir' variable from general section and add it to the
695  'os.environ' dictionary.
696  """
697 
698  if self._config.has_option("general", "datasetdir"):
699  dataset_directory = self._config.get("general", "datasetdir")
700  # add it to environment for later variable expansion:
701  os.environ["datasetdir"] = dataset_directory
702  self._general_options["datasetdir"] = dataset_directory
703  else:
704  print("No datasetdir given in [general] section.", end=' ')
705  print("Be sure to give a full path in inputFileList.")
706  self._general_options["datasetdir"] = ""
707 
708 
709  def _fetch_datasets(self):
710  """Fetch internal and external dataset configurations."""
711 
712  all_configs = collections.OrderedDict()
713  all_configs["main"] = {"config": self._config,
714  "general": self._general_options,
715  "weight": None}
716  all_configs.update(self._external_datasets)
717 
718  for config in six.itervalues(all_configs):
719  global_weight = "1" if config["weight"] is None else config["weight"]
720  if global_weight+self._config.config_path in self._common_weights:
721  global_weight = self._common_weights[global_weight+
722  self._config.config_path]
723  elif global_weight in self._common_weights:
724  global_weight = self._common_weights[global_weight]
725  else:
726  global_weight = (global_weight,)
727  common_weights = {}
728  weight_dict = {}
729  for section in config["config"].sections():
730  cache_datasetdir = os.environ["datasetdir"]
731  if "general" in section:
732  if config["config"].has_option("general", "datasetdir"):
733  os.environ["datasetdir"] = config["config"].get("general", "datasetdir")
734  elif section == "weights":
735  for option in config["config"].options(section):
736  common_weights[option] \
737  = [x.strip() for x in
738  config["config"].get(section, option).split(",")]
739  elif section.startswith("dataset:"):
740  print("-"*75)
741  # set name from section-name
742  name = section[8:]
743  if name in self._datasets:
744  print("WARNING: Duplicate definition of dataset '{}'".format(name))
745  print(" -> Using definition in '{}':\n".format(config["config"].config_path))
746  print(" [{}]".format(section))
747  for k,v in config["config"].items(section):
748  print(" ", k, "=", v)
749  print()
750  self._datasets[name] = {}
751 
752  # extract weight for the dataset
753  if config["config"].has_option(section, "weight"):
754  self._weight_dict[name] \
755  = [x.strip() for x in
756  config["config"].get(section, "weight").split(",")]
757  else:
758  self._weight_dict[name] = ["1.0"]
759  self._weight_dict[name] = [global_w+"*"+w
760  for w in self._weight_dict[name]
761  for global_w in global_weight]
762  weight_dict[name] = self._weight_dict[name]
763 
764  # extract essential variables
765  for var in ("inputFileList", "collection"):
766  try:
767  self._datasets[name][var] = config["config"].get(section, var)
768  except ConfigParser.NoOptionError:
769  print("No", var, "found in", section+". Please check ini-file.")
770  sys.exit(1)
771 
772  # get globaltag and configTemplate. If none in section, try to get
773  # default from [general] section.
774  for var in ("configTemplate", "globaltag"):
775  try:
776  self._datasets[name][var] = config["config"].get(section, var)
777  except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
778  try:
779  self._datasets[name][var] = config["general"][var]
780  except KeyError:
781  try:
782  self._datasets[name][var] \
783  = all_configs["main"]["general"][var]
784  except KeyError:
785  print("No",var,"found in ["+section+"]", end=' ')
786  print("and no default in [general] section.")
787  sys.exit(1)
788 
789  # extract non-essential options
790  if "ALCARECOTkAlCosmics" in self._datasets[name]["collection"]:
791  try:
792  self._datasets[name]["cosmicsZeroTesla"] \
793  = config["config"].getboolean(section,"cosmicsZeroTesla")
794  except ConfigParser.NoOptionError:
795  print("No option cosmicsZeroTesla found in", section, "even though it is required for dataset type", self._datasets[name]["collection"]+". Please check ini-file.")
796  sys.exit(1)
797  try:
798  self._datasets[name]["cosmicsDecoMode"] \
799  = config["config"].getboolean(section,"cosmicsDecoMode")
800  except ConfigParser.NoOptionError:
801  print("No option cosmicsDecoMode found in", section, "even though it is required for dataset type", self._datasets[name]["collection"]+". Please check ini-file.")
802  sys.exit(1)
803 
804  self._datasets[name]["primaryWidth"] = -1.0
805  if config["config"].has_option(section,"primaryWidth"):
806  self._datasets[name]["primaryWidth"] \
807  = config["config"].getfloat(section,"primaryWidth")
808 
809  self._datasets[name]["numberOfEvents"] = -1
810  if config["config"].has_option(section, "numberOfEvents"):
811  self._datasets[name]["numberOfEvents"] \
812  = config["config"].getint(section, "numberOfEvents")
813 
814  self._datasets[name]["json"] = ""
815  try:
816  self._datasets[name]["json"] = config["config"].get(section,"json")
817  except ConfigParser.NoOptionError:
818  try:
819  self._datasets[name]["json"] = config["general"]["json"]
820  except KeyError:
821  try:
822  self._datasets[name]["json"] \
823  = all_configs["main"]["general"]["json"]
824  except KeyError:
825  print("No json given in either [general] or", end=' ')
826  print("["+section+"] sections.")
827  print(" -> Proceeding without json-file.")
828 
829 
830  #replace ${datasetdir} and other variables, e.g. $CMSSW_BASE
831  for var in ("inputFileList", "json", "configTemplate"):
832  self._datasets[name][var] \
833  = os.path.expandvars(self._datasets[name][var])
834 
835 
836  # Get number of jobs from lines in inputfilelist
837  self._datasets[name]["njobs"] = 0
838  try:
839  with open(self._datasets[name]["inputFileList"], "r") as filelist:
840  for line in filelist:
841  if "CastorPool" in line:
842  continue
843  # ignore empty lines
844  if not line.strip()=="":
845  self._datasets[name]["njobs"] += 1
846  except IOError:
847  print("Inputfilelist", self._datasets[name]["inputFileList"], end=' ')
848  print("does not exist.")
849  sys.exit(1)
850  if self._datasets[name]["njobs"] == 0:
851  print("Number of jobs is 0. There may be a problem with the inputfilelist:")
852  print(self._datasets[name]["inputFileList"])
853  sys.exit(1)
854 
855  # Check if njobs gets overwritten in .ini-file
856  if config["config"].has_option(section, "njobs"):
857  if config["config"].getint(section, "njobs") <= self._datasets[name]["njobs"]:
858  self._datasets[name]["njobs"] = config["config"].getint(section, "njobs")
859  else:
860  print("'njobs' is bigger than the number of files for this", end=' ')
861  print("dataset:", self._datasets[name]["njobs"])
862  print("Using default.")
863  else:
864  print("No number of jobs specified. Using number of files in", end=' ')
865  print("inputfilelist as the number of jobs.")
866 
867  # check if local weights override global weights and resolve name clashes
868  for weight_name, weight_values in six.iteritems(common_weights):
869  for key, weight in six.iteritems(weight_dict):
870  if any([weight_name in w for w in weight]):
871  self._common_weights[weight_name+config["config"].config_path] = weight_values
872  self._weight_dict[key] = [mps_tools.replace_factors(w,
873  weight_name,
874  weight_name+config["config"].config_path)
875  for w in weight]
876  else:
877  self._common_weights[weight_name] = weight_values
878  self._weight_dict[key] = weight
879 
880  os.environ["datasetdir"] = cache_datasetdir
881 
882  if len(self._datasets) == 0:
883  print("No dataset section defined in '{0}'".format(
884  ", ".join([self._args.alignmentConfig]+list(self._external_datasets.keys()))))
885  print("At least one section '[dataset:<name>]' is required.")
886  sys.exit(1)
887 
888  self._global_tag = self._datasets[name]["globaltag"]
889 
890 
891 
892 if __name__ == "__main__":
893  try:
894  main()
895  except KeyboardInterrupt:
896  pass