uploadConditions_v2.py
1 #!/usr/bin/env python3
2 """
3 Primary Author:
4 Joshua Dawes - CERN, CMS - The University of Manchester
5 
6 Debugging, Integration and Maintenance:
7 Andres Cardenas - CERN, CMS - Universidad San Francisco
8 
9 Upload script wrapper - controls the automatic update system.
10 
11 Note: the name of this file follows a different convention from the others because it must match the name of the current upload script.
12 
13 Takes user arguments and passes them to the main upload module CondDBFW.uploads, once the correct version exists.
14 
15 1. Ask the server corresponding to the database we're uploading to which version of CondDBFW it has (query the /conddbfw_version/ url).
16 2. Decide which directory we can write to - either the current local directory, or /tmp/random_string/.
17 3. Pull the commit returned from the server into the directory from step 2.
18 4. Invoke the CondDBFW.uploads module with the arguments given to this script.
19 
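Example invocations (illustrative - the file names are placeholders, and the exact flags are the ones defined by the option parser in parse_arguments() below):

    uploadConditions.py conditions.db                  # metadata read from conditions.txt, or the wizard is started
    uploadConditions.py -m metadata.json conditions.db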
20 """
21 
22 __version__ = 1
23 
24 try:
25  from CondCore.Utilities.CondDBFW.url_query import url_query
26 except ImportError:
27  print("ERROR: Could not access the url query utility. You are probably not in a CMSSW environment.")
28  exit(-1)
29 try:
30  from StringIO import StringIO
31 except:
32  pass
33 import traceback
34 import sys
35 import os
36 import json
37 import subprocess
38 import optparse
39 import netrc
40 import shutil
41 import getpass
42 import errno
43 import sqlite3
44 
45 
46 horizontal_rule = "="*60
47 
48 def run_upload(**parameters):
49  """
50  Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.
51  """
52  try:
53  import CondCore.Utilities.CondDBFW.uploads as uploads
54  except Exception as e:
55  traceback.print_exc()
56  exit("CondDBFW or one of its dependencies could not be imported.\n"\
57  + "If the CondDBFW directory exists, you are likely not in a CMSSW environment.")
58  # we have CondDBFW, so just call the module with the parameters given in the command line
59  uploader = uploads.uploader(**parameters)
60  result = uploader.upload()
61 
62 def getInput(default, prompt = ''):
63  '''Like input() but with a default and automatic strip().
64  '''
65 
66  answer = input(prompt)
67  if answer:
68  return answer.strip()
69 
70  return default.strip()
71 
72 
73 def getInputWorkflow(prompt = ''):
74  '''Like getInput() but tailored to get target workflows (synchronization options).
75  '''
76 
77  while True:
78  workflow = getInput(defaultWorkflow, prompt)
79 
80  if workflow in frozenset(['offline', 'hlt', 'express', 'prompt', 'pcl']):
81  return workflow
82 
83  print('Please specify one of the allowed workflows. See above for the explanation of each of them.')
84 
85 
86 def getInputChoose(optionsList, default, prompt = ''):
87  '''Makes the user choose from a list of options.
88  '''
89 
90  while True:
91  index = getInput(default, prompt)
92 
93  try:
94  return optionsList[int(index)]
95  except ValueError:
96  print('Please specify an index of the list (i.e. integer).')
97  except IndexError:
98  print('The index you provided is not in the given list.')
99 
100 
101 def getInputRepeat(prompt = ''):
102  '''Like input() but repeats if nothing is provided and automatic strip().
103  '''
104 
105  while True:
106  answer = input(prompt)
107  if answer:
108  return answer.strip()
109 
110  print('You need to provide a value.')
111 
112 def runWizard(basename, dataFilename, metadataFilename):
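 '''Interactively collects the upload metadata and saves it as JSON in metadataFilename.
 A hypothetical example of the file it produces (keys as assembled at the end of this function):
     {
         "destinationDatabase": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
         "destinationTags": {"BeamSpotObjects_PCL_byRun_v0_offline": {}},
         "inputTag": "BeamSpotObject_ByRun",
         "since": 1234,
         "userText": "example upload"
     }
 '''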
113  while True:
114  print('''\nWizard for metadata for %s
115 
116 I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename)
117 
118  # Try to get the available inputTags
119  try:
120  dataConnection = sqlite3.connect(dataFilename)
121  dataCursor = dataConnection.cursor()
122  dataCursor.execute('select name from sqlite_master where type == "table"')
123  tables = set(list(zip(*dataCursor.fetchall()))[0])
124 
125  # only conddb V2 supported...
126  if 'TAG' in tables:
127  dataCursor.execute('select NAME from TAG')
128  # In any other case, do not try to get the inputTags
129  else:
130  raise Exception()
131 
132  inputTags = dataCursor.fetchall()
133  if len(inputTags) == 0:
134  raise Exception()
135  inputTags = list(zip(*inputTags))[0]
136 
137  except Exception:
138  inputTags = []
139 
140  if len(inputTags) == 0:
141  print('\nI could not find any input tag in your data file, but you can still specify one manually.')
142 
143  inputTag = getInputRepeat(
144  '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')
145 
146  else:
147  print('\nI found the following input tags in your SQLite data file:')
148  for (index, inputTag) in enumerate(inputTags):
149  print(' %s) %s' % (index, inputTag))
150 
151  inputTag = getInputChoose(inputTags, '0',
152  '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')
153 
154  databases = {
155  'oraprod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
156  'prod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
157  'oradev': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
158  'prep': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
159  }
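 # 'oraprod'/'prod' and 'oradev'/'prep' are aliases: each pair points to the same connection string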
160 
161  destinationDatabase = ''
162  ntry = 0
163  print('\nWhich is the destination database where the tags should be exported?')
164  print('\n%s) %s' % ('oraprod', databases['oraprod']))
165  print('\n%s) %s' % ('oradev', databases['oradev']))
166 
167  while ( destinationDatabase not in databases.values() ):
168  if ntry==0:
169  inputMessage = \
170  '\nPossible choices: oraprod or oradev \ndestinationDatabase: '
171  elif ntry==1:
172  inputMessage = \
173  '\nPlease choose one of the two valid destinations: oraprod or oradev \ndestinationDatabase: '
174  else:
175  raise Exception('No valid destination chosen. Bailing out...')
176 
177  databaseInput = getInputRepeat(inputMessage).lower()
178  if databaseInput in databases.keys():
179  destinationDatabase = databases[databaseInput]
180  ntry += 1
181 
182  while True:
183  since = getInput('',
184  '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
185  if not since:
186  since = None
187  break
188  else:
189  try:
190  since = int(since)
191  break
192  except ValueError:
193  print('The since value has to be an integer or empty (null).')
194 
195  userText = getInput('',
196  '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')
197 
198  destinationTags = {}
199  while True:
200  destinationTag = getInput('',
201  '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
202  if not destinationTag:
203  if len(destinationTags) == 0:
204  print('There must be at least one destination tag.')
205  continue
206  break
207 
208  if destinationTag in destinationTags:
209  print(
210  'You already added this destination tag. Overwriting the previous one with this new one.')
211 
212  destinationTags[destinationTag] = {
213  }
214 
215  metadata = {
216  'destinationDatabase': destinationDatabase,
217  'destinationTags': destinationTags,
218  'inputTag': inputTag,
219  'since': since,
220  'userText': userText,
221  }
222 
223  metadata = json.dumps(metadata, sort_keys=True, indent=4)
224  print('\nThis is the generated metadata:\n%s' % metadata)
225 
226  if getInput('n',
227  '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() == 'y':
228  break
229  print('Saving generated metadata in %s...'% metadataFilename)
230  with open(metadataFilename, 'w') as metadataFile:
231  metadataFile.write(metadata)
232 
233 def parse_arguments():
234  # read in command line arguments, and build metadata dictionary from them
235  parser = optparse.OptionParser(description="CMS Conditions Upload Script in CondDBFW.",
236  usage = 'Usage: %prog [options] <file>')
237 
238  # metadata arguments
239  parser.add_option("-i", "--inputTag", type=str,\
240  help="Tag to take IOVs + Payloads from in --sourceDB.")
241  parser.add_option("-t", "--destinationTag", type=str,\
242  help="Tag to copy IOVs + Payloads to in --destDB.")
243  parser.add_option("-D", "--destinationDatabase", type=str,\
244  help="Database to copy IOVs + Payloads to.")
245  parser.add_option("-s", "--since", type=int,\
246  help="Since to take IOVs from.")
247  parser.add_option("-u", "--userText", type=str,\
248  help="Description of --destTag (can be empty).")
249 
250  # non-metadata arguments
251  parser.add_option("-m", "--metadataFile", type=str, help="Metadata file to take metadata from.")
252 
253  parser.add_option("-d", "--debug", action="store_true", default=False)
254  parser.add_option("-v", "--verbose", action="store_true", default=False)
255  parser.add_option("-T", "--testing", action="store_true")
256  parser.add_option("--fcsr-filter", type=str, help="Synchronization to take FCSR from for local filtering of IOVs.")
257 
258  parser.add_option("-n", "--netrc", help = 'The netrc host (machine) from where the username and password will be read.')
259 
260  parser.add_option("-a", "--authPath", help = 'The path of the .netrc file for the authentication. Default: $HOME')
261 
262  parser.add_option("-H", "--hashToUse")
263 
264  parser.add_option("-S", "--server")
265 
266  parser.add_option("-o", "--review-options", action="store_true")
267 
268  parser.add_option("-r", "--replay-file")
269 
270  (command_line_data, arguments) = parser.parse_args()
271 
272  if len(arguments) < 1:
273  if command_line_data.hashToUse == None:
274  parser.print_help()
275  exit(-2)
276 
277  command_line_data.sourceDB = arguments[0]
278 
279  if command_line_data.replay_file:
280  dictionary = json.loads("".join(open(command_line_data.replay_file, "r").readlines()))
281  command_line_data.tier0_response = dictionary["tier0_response"]
282 
283  # default is the production server, which can point to either database anyway
284  server_alias_to_url = {
285  "prep" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
286  "dev" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
287  "prod" : "https://cms-conddb.cern.ch/cmsDbCondUpload/"
288  }
289 
290  # if prep, prod or None were given, convert to URLs in dictionary server_alias_to_url
291  # if not, assume a URL has been given and use this instead
292  if command_line_data.server in server_alias_to_url.keys():
293  command_line_data.server = server_alias_to_url[command_line_data.server]
294 
295  # resolve destination databases
296  database_alias_to_connection = {
297  "prep": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
298  "dev": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
299  "prod": "oracle://cms_orcon_adg/CMS_CONDITIONS"
300  }
301 
302  if command_line_data.destinationDatabase in database_alias_to_connection.keys():
303  command_line_data.destinationDatabase = database_alias_to_connection[command_line_data.destinationDatabase]
304 
305 
306  # use netrc to get username and password
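 # an illustrative .netrc entry (the machine key must be 'ConditionUploader'; login/password are placeholders):
 #   machine ConditionUploader login <username> password <password>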
307  try:
308  netrc_file = command_line_data.netrc
309  auth_path = command_line_data.authPath
310  if not auth_path is None:
311  if netrc_file is None:
312  netrc_file = os.path.join(auth_path,'.netrc')
313  else:
314  netrc_file = os.path.join(auth_path, netrc_file)
315 
316  netrc_authenticators = netrc.netrc(netrc_file).authenticators("ConditionUploader")
317  if netrc_authenticators == None:
318  print("Your netrc file must contain the key 'ConditionUploader'.")
319  manual_input = input("Do you want to try to type your credentials? ")
320  if manual_input == "y":
321  # ask for username and password
322  username = input("Username: ")
323  password = getpass.getpass("Password: ")
324  else:
325  exit()
326  else:
327  username = netrc_authenticators[0]
328  password = netrc_authenticators[2]
329  except Exception:
330  print("Couldn't obtain your credentials (either from netrc or manual input).")
331  exit()
332 
333  command_line_data.username = username
334  command_line_data.password = password
335  # this will be used as the final destinationTags value by all input methods
336  # apart from the metadata file
337  command_line_data.destinationTags = {command_line_data.destinationTag:{}}
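 # each destination tag maps to a per-tag options dictionary (currently always empty)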
338 
339  """
340  Construct metadata_dictionary:
341  Currently, there are 3 cases:
342 
343  1) An IOV is being appended to an existing Tag with an existing Payload.
344  In this case, we just take all data from the command line.
345 
346  2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line.
347 
348  3) A metadata file is given, hence we parse the file, and then iterate through command line arguments
349  since these override the options set in the metadata file.
350 
351  """
352 
353  # Hash to use, entirely from command line
354  if command_line_data.hashToUse != None:
355  command_line_data.userText = ""
356  metadata_dictionary = command_line_data.__dict__
357  elif command_line_data.metadataFile == None:
358  if command_line_data.sourceDB != None and (command_line_data.inputTag == None or command_line_data.destinationTag == None or command_line_data.destinationDatabase == None):
359  basepath = command_line_data.sourceDB.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
360  basename = os.path.basename(basepath)
361  dataFilename = '%s.db' % basepath
362  metadataFilename = '%s.txt' % basepath
363 
364  # Data file
365  try:
366  with open(dataFilename, 'rb') as dataFile:
367  pass
368  except IOError as e:
369  errMsg = 'Could not open SQLite data file %s' % dataFilename
370  print( errMsg )
371  ret = {'status': -3}
372  ret['error'] = errMsg
373  exit(ret)
374 
375  # Metadata file
376 
377  command_line_data.sourceDB = dataFilename
378 
379  try:
380  with open(metadataFilename, 'rb') as metadataFile:
381  pass
382  except IOError as e:
383  if e.errno != errno.ENOENT:
384  errMsg = 'Could not open file %s (for a reason other than it not existing)' % metadataFilename
385  ret = {}
386  ret['status'] = -4
387  ret['error'] = errMsg
388  exit (ret)
389 
390  if getInput('y', '\nIt looks like the metadata file %s does not exist and not enough parameters were received in the command line. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
391  errMsg = 'Metadata file %s does not exist' %metadataFilename
392  ret = {}
393  ret['status'] = -5
394  ret['error'] = errMsg
395  exit(ret)
396  # Wizard
397  runWizard(basename, dataFilename, metadataFilename)
398  command_line_data.metadataFile = metadataFilename
399  else:
400  command_line_data.userText = command_line_data.userText\
401  if command_line_data.userText != None\
402  else str(input("Tag's description [can be empty]:"))
403  metadata_dictionary = command_line_data.__dict__
404 
405  if command_line_data.metadataFile != None:
406  metadata_dictionary = json.loads("".join(open(os.path.abspath(command_line_data.metadataFile), "r").readlines()))
407  metadata_dictionary["username"] = username
408  metadata_dictionary["password"] = password
409  metadata_dictionary["userText"] = metadata_dictionary.get("userText")\
410  if metadata_dictionary.get("userText") != None\
411  else str(input("Tag's description [can be empty]:"))
412 
413  # go through command line options and, if they are set, overwrite entries
414  for (option_name, option_value) in command_line_data.__dict__.items():
415  # if the metadata_dictionary sets this, overwrite it
416  if option_name != "destinationTags":
417  if option_value != None or (option_value == None and not(option_name in metadata_dictionary.keys())):
418  # if option_value has a value, override the metadata file entry
419  # or if option_value is None but the metadata file doesn't give a value,
420  # set the entry to None as well
421  metadata_dictionary[option_name] = option_value
422  else:
423  if option_value != {None:{}}:
424  metadata_dictionary["destinationTags"] = {option_value:{}}
425  elif option_value == {None:{}} and not("destinationTags" in metadata_dictionary.keys()):
426  metadata_dictionary["destinationTags"] = {None:{}}
427 
428  if command_line_data.review_options:
429  defaults = {
430  "since" : "Since of first IOV",
431  "userText" : "Populated by upload process",
432  "netrc" : "None given",
433  "fcsr_filter" : "Don't apply",
434  "hashToUse" : "Using local SQLite file instead"
435  }
436  print("Configuration to use for the upload:")
437  for key in metadata_dictionary:
438  if key not in ["username", "password", "destinationTag"]:
439  value_to_print = metadata_dictionary[key] if metadata_dictionary[key] != None else defaults[key]
440  print("\t%s : %s" % (key, value_to_print))
441 
442  if input("\nDo you want to continue? [y/n] ") != "y":
443  exit()
444 
445  if metadata_dictionary["server"] == None:
446  if metadata_dictionary["destinationDatabase"] == "oracle://cms_orcoff_prep/CMS_CONDITIONS":
447  metadata_dictionary["server"] = server_alias_to_url["prep"]
448  else:
449  metadata_dictionary["server"] = server_alias_to_url["prod"]
450 
451  return metadata_dictionary
452 
453 def get_version(url):
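 """Ask the upload server which script version it expects (queries the script_version/ endpoint)."""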
454  query = url_query(url=url + "script_version/")
455  response = query.send()
456  return response
457 
458 
459 if __name__ == "__main__":
460 
461  upload_metadata = parse_arguments()
462 
463  # upload_metadata should be used to decide the service url
464  final_service_url = upload_metadata["server"]
465  try:
466  response = get_version(final_service_url)
467  server_version = json.loads(response)
468  except Exception as e:
469  print(horizontal_rule)
470  print(e)
471  print("Could not connect to server at %s"%final_service_url)
472  print("If you specified a server please check it is correct. If that is not the issue please contact the AlcaDB team.")
473  print(horizontal_rule)
474  exit(1)
475 
476  if server_version["version"] != __version__:
477  print(horizontal_rule)
478  print("Local upload script is different than server version. Please run the following command to get the latest script.")
479  print("curl --insecure -o uploadConditions.py %sget_upload_script/ && chmod +x uploadConditions.py;"%final_service_url)
480  print(horizontal_rule)
481  exit(1)
482 
483  import CondCore.Utilities.CondDBFW.data_sources as data_sources
484 
485  upload_metadata["sqlite_file"] = upload_metadata.get("sourceDB")
486 
487  try:
488  os.mkdir('upload_logs')
489  except OSError as e:
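 # the directory most likely exists already from a previous run; ignore the error and reuse it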
490  pass
491 
492  # make new dictionary, and copy over everything except "metadata_source"
493  upload_metadata_argument = {}
494  for (key, value) in upload_metadata.items():
495  if key != "metadata_source":
496  upload_metadata_argument[key] = value
497 
498  upload_metadata["metadata_source"] = data_sources.json_data_node.make(upload_metadata_argument)
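 # wrap the plain dict as a CondDBFW json_data_node before handing it to the uploads module as "metadata_source"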
499  try:
500  # pass dictionary as arguments to match keywords - the constructor has a **kwargs parameter to deal with stray arguments
501  run_upload(**upload_metadata)
502  print(horizontal_rule)
503  print("Process completed without issues. Please check logs for further details.")
504  print(horizontal_rule)
505  except SystemExit as e:
506  print(horizontal_rule)
507  print("Process exited abnormally. Please check logs for details.")
508  print(horizontal_rule)
509  exit(1)
510  exit(0)