uploadConditions.py
#!/usr/bin/env python3
"""
Primary Author:
Joshua Dawes - CERN, CMS - The University of Manchester

Debugging, Integration and Maintenance:
Andres Cardenas - CERN, CMS - Universidad San Francisco

Upload script wrapper - controls the automatic update system.

Note: the name of this file follows a different convention from the others because it must match the name of the current upload script.

Takes user arguments and passes them to the main upload module CondDBFW.uploads, once the correct version exists.

1. Ask the server corresponding to the database we're uploading to which version of CondDBFW it has (query the /conddbfw_version/ url).
2. Decide which directory we can write to - either the current local directory, or /tmp/random_string/.
3. Pull the commit returned from the server into the directory from step 2.
4. Invoke the CondDBFW.uploads module with the arguments given to this script.

"""
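# Illustrative invocations - a sketch only; the option names are defined in
# parse_arguments() below, while the file and tag names here are placeholders:
#
#   uploadConditions.py --metadataFile conditions.txt --sourceDB conditions.db
#   uploadConditions.py --sourceDB conditions.db --inputTag MyInputTag \
#       --destinationTag MyTag_v1_offline --destinationDatabase prep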
__version__ = 1

#import pycurl
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
try:
    from StringIO import StringIO
except ImportError:
    pass
import traceback
import sys
import os
import json
import subprocess
import argparse
import netrc
import shutil
import getpass
import errno
import sqlite3


horizontal_rule = "="*60

def run_upload(**parameters):
    """
    Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.
    """
    try:
        import CondCore.Utilities.CondDBFW.uploads as uploads
    except Exception as e:
        traceback.print_exc()
        exit("CondDBFW or one of its dependencies could not be imported.\n"\
            + "If the CondDBFW directory exists, you are likely not in a CMSSW environment.")
    # we have CondDBFW, so just call the module with the parameters given in the command line
    uploader = uploads.uploader(**parameters)
    result = uploader.upload()

def getInput(default, prompt = ''):
    '''Like input() but with a default and automatic strip().
    '''

    answer = input(prompt)
    if answer:
        return answer.strip()

    return default.strip()


def getInputWorkflow(prompt = ''):
    '''Like getInput() but tailored to get target workflows (synchronization options).
    '''

    while True:
        workflow = getInput(defaultWorkflow, prompt)

        if workflow in frozenset(['offline', 'hlt', 'express', 'prompt', 'pcl']):
            return workflow

        print('Please specify one of the allowed workflows. See above for the explanation on each of them.')


def getInputChoose(optionsList, default, prompt = ''):
    '''Makes the user choose from a list of options.
    '''

    while True:
        index = getInput(default, prompt)

        try:
            return optionsList[int(index)]
        except ValueError:
            print('Please specify an index of the list (i.e. integer).')
        except IndexError:
            print('The index you provided is not in the given list.')


def getInputRepeat(prompt = ''):
    '''Like input() but repeats if nothing is provided and automatic strip().
    '''

    while True:
        answer = input(prompt)
        if answer:
            return answer.strip()

        print('You need to provide a value.')
def runWizard(basename, dataFilename, metadataFilename):
    while True:
        print('''\nWizard for metadata for %s

I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename)

        # Try to get the available inputTags
        try:
            dataConnection = sqlite3.connect(dataFilename)
            dataCursor = dataConnection.cursor()
            dataCursor.execute('select name from sqlite_master where type == "table"')
            tables = set(list(zip(*dataCursor.fetchall()))[0])

            # only conddb V2 supported...
            if 'TAG' in tables:
                dataCursor.execute('select NAME from TAG')
            # In any other case, do not try to get the inputTags
            else:
                raise Exception()

            inputTags = dataCursor.fetchall()
            if len(inputTags) == 0:
                raise Exception()
            inputTags = list(zip(*inputTags))[0]

        except Exception:
            inputTags = []

        if len(inputTags) == 0:
            print('\nI could not find any input tag in your data file, but you can still specify one manually.')

            inputTag = getInputRepeat(
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')

        else:
            print('\nI found the following input tags in your SQLite data file:')
            for (index, inputTag) in enumerate(inputTags):
                print(' %s) %s' % (index, inputTag))

            inputTag = getInputChoose(inputTags, '0',
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')

        databases = {
            'oraprod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
            'prod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
            'oradev': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
            'prep': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
        }

        destinationDatabase = ''
        ntry = 0
        print('\nWhich is the destination database where the tags should be exported?')
        print('\n%s) %s' % ('oraprod', databases['oraprod']))
        print('\n%s) %s' % ('oradev', databases['oradev']))

        while ( destinationDatabase not in databases.values() ):
            if ntry==0:
                inputMessage = \
                    '\nPossible choices: oraprod or oradev \ndestinationDatabase: '
            elif ntry==1:
                inputMessage = \
                    '\nPlease choose one of the two valid destinations: oraprod or oradev \ndestinationDatabase: '
            else:
                raise Exception('No valid destination chosen. Bailing out...')

            databaseInput = getInputRepeat(inputMessage).lower()
            if databaseInput in databases.keys():
                destinationDatabase = databases[databaseInput]
            ntry += 1

        while True:
            since = getInput('',
                '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
            if not since:
                since = None
                break
            else:
                try:
                    since = int(since)
                    break
                except ValueError:
                    print('The since value has to be an integer or empty (null).')

        userText = getInput('',
            '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')

        destinationTags = {}
        while True:
            destinationTag = getInput('',
                '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
            if not destinationTag:
                if len(destinationTags) == 0:
                    print('There must be at least one destination tag.')
                    continue
                break

            if destinationTag in destinationTags:
                print(
                    'You already added this destination tag. Overwriting the previous one with this new one.')

            destinationTags[destinationTag] = {
            }

        metadata = {
            'destinationDatabase': destinationDatabase,
            'destinationTags': destinationTags,
            'inputTag': inputTag,
            'since': since,
            'userText': userText,
        }

        metadata = json.dumps(metadata, sort_keys=True, indent=4)
        print('\nThis is the generated metadata:\n%s' % metadata)

        if getInput('n',
            '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() == 'y':
            break
    print('Saving generated metadata in %s...' % metadataFilename)
    with open(metadataFilename, 'w') as metadataFile:
        metadataFile.write(metadata)
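# The metadata file written by the wizard above is plain JSON. An illustrative
# example (tag names and values are placeholders taken from the wizard prompts):
#
#   {
#       "destinationDatabase": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
#       "destinationTags": {"BeamSpotObjects_PCL_byRun_v0_offline": {}},
#       "inputTag": "BeamSpotObject_ByRun",
#       "since": 1234,
#       "userText": "test upload"
#   }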

def parse_arguments():
    # read in command line arguments, and build metadata dictionary from them
    parser = argparse.ArgumentParser(prog="cmsDbUpload client", description="CMS Conditions Upload Script in CondDBFW.")

    parser.add_argument("--sourceDB", type=str, help="DB to find Tags, IOVs + Payloads in.", required=False)

    # metadata arguments
    parser.add_argument("--inputTag", type=str,\
                        help="Tag to take IOVs + Payloads from in --sourceDB.", required=False)
    parser.add_argument("--destinationTag", type=str,\
                        help="Tag to copy IOVs + Payloads to in --destDB.", required=False)
    parser.add_argument("--destinationDatabase", type=str,\
                        help="Database to copy IOVs + Payloads to.", required=False)
    parser.add_argument("--since", type=int,\
                        help="Since to take IOVs from.", required=False)
    parser.add_argument("--userText", type=str,\
                        help="Description of --destTag (can be empty).")

    # non-metadata arguments
    parser.add_argument("--metadataFile", "-m", type=str, help="Metadata file to take metadata from.", required=False)

    parser.add_argument("--debug", required=False, action="store_true")
    parser.add_argument("--verbose", required=False, action="store_true")
    parser.add_argument("--testing", required=False, action="store_true")
    parser.add_argument("--fcsr-filter", type=str, help="Synchronization to take FCSR from for local filtering of IOVs.", required=False)

    parser.add_argument("--netrc", required=False)

    parser.add_argument("--hashToUse", required=False)

    parser.add_argument("--server", required=False)

    parser.add_argument("--review-options", required=False, action="store_true")

    parser.add_argument("--replay-file", required=False)

    command_line_data = parser.parse_args()

    if command_line_data.replay_file:
        dictionary = json.loads("".join(open(command_line_data.replay_file, "r").readlines()))
        command_line_data.tier0_response = dictionary["tier0_response"]

    # default is the production server, which can point to either database anyway
    server_alias_to_url = {
        "prep" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
        "dev" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
        "prod" : "https://cms-conddb.cern.ch/cmsDbCondUpload/"
    }

    # if prep, prod or None were given, convert to URLs in dictionary server_alias_to_url
    # if not, assume a URL has been given and use this instead
    if command_line_data.server in server_alias_to_url.keys():
        command_line_data.server = server_alias_to_url[command_line_data.server]

    # resolve destination databases
    database_alias_to_connection = {
        "prep": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
        "dev": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
        "prod": "oracle://cms_orcon_adg/CMS_CONDITIONS"
    }

    if command_line_data.destinationDatabase in database_alias_to_connection.keys():
        command_line_data.destinationDatabase = database_alias_to_connection[command_line_data.destinationDatabase]

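    # The lookup below expects a standard netrc entry keyed on the machine name
    # "ConditionUploader"; an illustrative ~/.netrc line (credentials are placeholders):
    #   machine ConditionUploader login my_username password my_password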
    # use netrc to get username and password
    try:
        netrc_file = command_line_data.netrc
        netrc_authenticators = netrc.netrc(netrc_file).authenticators("ConditionUploader")
        if netrc_authenticators == None:
            print("Your netrc file must contain the key 'ConditionUploader'.")
            manual_input = input("Do you want to try to type your credentials? ")
            if manual_input == "y":
                # ask for username and password
                username = input("Username: ")
                password = getpass.getpass("Password: ")
            else:
                exit()
        else:
            print("Read your credentials from ~/.netrc. If you want to use a different file, supply its name with the --netrc argument.")
            username = netrc_authenticators[0]
            password = netrc_authenticators[2]
    except Exception:
        print("Couldn't obtain your credentials (either from netrc or manual input).")
        exit()

    command_line_data.username = username
    command_line_data.password = password
    # this will be used as the final destinationTags value by all input methods
    # apart from the metadata file
    command_line_data.destinationTags = {command_line_data.destinationTag:{}}

    """
    Construct metadata_dictionary:
    Currently, this is 3 cases:

    1) An IOV is being appended to an existing Tag with an existing Payload.
    In this case, we just take all data from the command line.

    2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line.

    3) A metadata file is given, hence we parse the file, and then iterate through command line arguments
    since these override the options set in the metadata file.

    """

    # Hash to use, entirely from command line
    if command_line_data.hashToUse != None:
        command_line_data.userText = ""
        metadata_dictionary = command_line_data.__dict__
    elif command_line_data.metadataFile == None:
        if command_line_data.sourceDB != None and (command_line_data.inputTag == None or command_line_data.destinationTag == None or command_line_data.destinationDatabase == None):
            basepath = command_line_data.sourceDB.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
            basename = os.path.basename(basepath)
            dataFilename = '%s.db' % basepath
            metadataFilename = '%s.txt' % basepath
            # Data file
            try:
                with open(dataFilename, 'rb') as dataFile:
                    pass
            except IOError as e:
                errMsg = 'Impossible to open SQLite data file %s' %dataFilename
                print( errMsg )
                ret = {}
                ret['status'] = -3
                ret['error'] = errMsg
                return ret

            # Metadata file

            try:
                with open(metadataFilename, 'rb') as metadataFile:
                    pass
            except IOError as e:
                if e.errno != errno.ENOENT:
                    errMsg = 'Impossible to open file %s (for other reason than not existing)' %metadataFilename
                    ret = {}
                    ret['status'] = -4
                    ret['error'] = errMsg
                    exit(ret)

                if getInput('y', '\nIt looks like the metadata file %s does not exist and not enough parameters were received in the command line. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
                    errMsg = 'Metadata file %s does not exist' %metadataFilename
                    ret = {}
                    ret['status'] = -5
                    ret['error'] = errMsg
                    exit(ret)
                # Wizard
                runWizard(basename, dataFilename, metadataFilename)
            command_line_data.metadataFile = metadataFilename
        else:
            command_line_data.userText = command_line_data.userText\
                if command_line_data.userText != None\
                else str(input("Tag's description [can be empty]:"))
            metadata_dictionary = command_line_data.__dict__

    if command_line_data.metadataFile != None:
        metadata_dictionary = json.loads("".join(open(os.path.abspath(command_line_data.metadataFile), "r").readlines()))
        metadata_dictionary["username"] = username
        metadata_dictionary["password"] = password
        metadata_dictionary["userText"] = metadata_dictionary.get("userText")\
            if metadata_dictionary.get("userText") != None\
            else str(input("Tag's description [can be empty]:"))

        # go through command line options and, if they are set, overwrite entries
        for (option_name, option_value) in command_line_data.__dict__.items():
            # if the metadata_dictionary sets this, overwrite it
            if option_name != "destinationTags":
                if option_value != None or (option_value == None and not(option_name in metadata_dictionary.keys())):
                    # if option_value has a value, override the metadata file entry
                    # or if option_value is None but the metadata file doesn't give a value,
                    # set the entry to None as well
                    metadata_dictionary[option_name] = option_value
            else:
                if option_value != {None:{}}:
                    metadata_dictionary["destinationTags"] = {option_value:{}}
                elif option_value == {None:{}} and not("destinationTags" in metadata_dictionary.keys()):
                    metadata_dictionary["destinationTags"] = {None:{}}

    if command_line_data.review_options:
        defaults = {
            "since" : "Since of first IOV",
            "userText" : "Populated by upload process",
            "netrc" : "None given",
            "fcsr_filter" : "Don't apply",
            "hashToUse" : "Using local SQLite file instead"
        }
        print("Configuration to use for the upload:")
        for key in metadata_dictionary:
            if key not in ["username", "password", "destinationTag"]:
                value_to_print = metadata_dictionary[key] if metadata_dictionary[key] != None else defaults.get(key, "None")
                print("\t%s : %s" % (key, value_to_print))

        if input("\nDo you want to continue? [y/n] ") != "y":
            exit()

    if metadata_dictionary["server"] == None:
        if metadata_dictionary["destinationDatabase"] == "oracle://cms_orcoff_prep/CMS_CONDITIONS":
            metadata_dictionary["server"] = server_alias_to_url["prep"]
        else:
            metadata_dictionary["server"] = server_alias_to_url["prod"]

    return metadata_dictionary

def get_version(url):
    return requests.get(url + "script_version/", verify=False)

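# get_version() above is assumed to return a JSON response of the form {"version": <number>};
# the main block below compares that value with this script's __version__.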
if __name__ == "__main__":

    upload_metadata = parse_arguments()

    # upload_metadata should be used to decide the service url
    final_service_url = upload_metadata["server"]
    try:
        response = get_version(final_service_url)
        server_version = response.json()
    except Exception as e:
        print(horizontal_rule)
        print(e)
        print("Could not connect to server at %s"%final_service_url)
        print("If you specified a server please check it is correct. If that is not the issue please contact the AlcaDB team.")
        print(horizontal_rule)
        exit(1)

    if server_version["version"] != __version__:
        print(horizontal_rule)
        print("The local upload script differs from the server version. Please run the following command to get the latest script.")
        print("curl --insecure -o uploadConditions.py %sget_upload_script/ && chmod +x uploadConditions.py;"%final_service_url)
        print(horizontal_rule)
        exit(1)

    import CondCore.Utilities.CondDBFW.data_sources as data_sources

    upload_metadata["sqlite_file"] = upload_metadata.get("sourceDB")

    try:
        os.mkdir('upload_logs')
    except OSError as e:
        pass

    # make new dictionary, and copy over everything except "metadata_source"
    upload_metadata_argument = {}
    for (key, value) in upload_metadata.items():
        if key != "metadata_source":
            upload_metadata_argument[key] = value

    upload_metadata["metadata_source"] = data_sources.json_data_node.make(upload_metadata_argument)
    try:
        # pass dictionary as arguments to match keywords - the constructor has a **kwargs parameter to deal with stray arguments
        run_upload(**upload_metadata)
        print(horizontal_rule)
        print("Process completed without issues. Please check logs for further details.")
        print(horizontal_rule)
    except SystemExit as e:
        print(horizontal_rule)
        print("Process exited abnormally. Please check logs for details.")
        print(horizontal_rule)
        exit(1)
    exit(0)