uploadConditions Namespace Reference

Classes

class  ConditionsUploader
 
class  HTTP
 
class  HTTPError
 

Functions

def addToTarFile
 
def get_version
 
def getCredentials
 
def getInput
 
def getInputChoose
 
def getInputRepeat
 
def getInputWorkflow
 
def main
 
def parse_arguments
 
def re_upload
 
def run_upload
 
def runWizard
 
def testTier0Upload
 
def upload
 
def uploadAllFiles
 
def uploadTier0Files
 

Variables

string __author__ = 'Andreas Pfeiffer'
 
string __copyright__ = 'Copyright 2015, CERN CMS'
 
list __credits__ = ['Giacomo Govi', 'Salvatore Di Guida', 'Miguel Ojeda', 'Andreas Pfeiffer']
 
string __email__ = 'giacomo.govi@cern.ch'
 
string __license__ = 'Unknown'
 
string __maintainer__ = 'Giacomo Govi'
 
int __version__ = 1
 
string authPathEnvVar = 'COND_AUTH_PATH'
 
string CERN_SSO_CURL_CAPATH = '/etc/pki/tls/certs'
 
string defaultBackend = 'online'
 
string defaultDevHostname = 'cms-conddb-dev.cern.ch'
 
string defaultHostname = 'cms-conddb-prod.cern.ch'
 
string defaultNetrcHost = 'ConditionUploader'
 
string defaultTemporaryFile = 'upload.tar.bz2'
 
string defaultUrlTemplate = 'https://%s/cmsDbUpload/'
 
string defaultWorkflow = 'offline'
 
string devLogDbSrv = 'cms_orcoff_prep'
 
list final_service_url = upload_metadata["server"]
 
string horizontal_rule = "="
 
string logDbSchema = 'CMS_COND_DROPBOX'
 
string prodLogDbSrv = 'cms_orcoff_prod'
 
tuple response = get_version(final_service_url)
 
tuple server_version = response.json()
 
tuple upload_metadata = parse_arguments()
 
dictionary upload_metadata_argument = {}
 
int waitForRetry = 15
 

Function Documentation

def uploadConditions.addToTarFile (   tarFile,
  fileobj,
  arcname 
)

Definition at line 428 of file uploadConditions.py.

Referenced by uploadConditions.ConditionsUploader.uploadFile().

429 def addToTarFile(tarFile, fileobj, arcname):
430  tarInfo = tarFile.gettarinfo(fileobj = fileobj, arcname = arcname)
431  tarInfo.mode = 0o400
432  tarInfo.uid = tarInfo.gid = tarInfo.mtime = 0
433  tarInfo.uname = tarInfo.gname = 'root'
434  tarFile.addfile(tarInfo, fileobj)
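A minimal usage sketch (assumed, not taken from uploadConditions.py): it packs a local data file into the temporary upload archive via addToTarFile(), so every entry carries the same normalized owner, permissions and mtime and the resulting archive is reproducible.

# Hypothetical example: 'data.db' and the archive name are placeholders; it assumes
# the script can be imported as a module.
import tarfile
from uploadConditions import addToTarFile

with tarfile.open('upload.tar.bz2', 'w:bz2') as tarFile:
    with open('data.db', 'rb') as dataFile:
        addToTarFile(tarFile, dataFile, 'data.db')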
def uploadConditions.get_version (   url)

Definition at line 435 of file uploadConditions.py.

Referenced by ValidationMatrix_v2.ReleaseComparison.compare().

436 def get_version(url):
437  return requests.get(url + "script_version/", verify=False)
438 
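A short, hedged usage sketch: get_version() issues a GET to '<url>script_version/' and returns the requests response. The service URL below is an example value, and the JSON layout of the reply is not documented here, so the payload is only printed for inspection.

# Hypothetical example; assumes the script can be imported as a module.
from uploadConditions import get_version

response = get_version("https://cms-conddb.cern.ch/cmsDbCondUpload/")
server_version = response.json()
print(server_version)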
def uploadConditions.getCredentials (   options)

Definition at line 617 of file uploadConditions.py.

References getInput().

Referenced by uploadAllFiles().

618 def getCredentials( options ):
619 
620  username = None
621  password = None
622  netrcPath = None
623  if authPathEnvVar in os.environ:
624  authPath = os.environ[authPathEnvVar]
625  netrcPath = os.path.join(authPath,'.netrc')
626  if options.authPath is not None:
627  netrcPath = os.path.join( options.authPath,'.netrc' )
628  try:
629  # Try to find the netrc entry
630  (username, account, password) = netrc.netrc( netrcPath ).authenticators(options.netrcHost)
631  except Exception:
632  # netrc entry not found, ask for the username and password
633  logging.info(
634  'netrc entry "%s" not found: if you wish not to have to retype your password, you can add an entry in your .netrc file. However, beware of the risks of having your password stored as plaintext. Instead.',
635  options.netrcHost)
636 
637  # Try to get a default username
638  defaultUsername = getpass.getuser()
639  if defaultUsername is None:
640  defaultUsername = '(not found)'
641 
642  username = getInput(defaultUsername, '\nUsername [%s]: ' % defaultUsername)
643  password = getpass.getpass('Password: ')
644 
645  return username, password
646 
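getCredentials() first looks for a .netrc entry whose machine name matches options.netrcHost (defaultNetrcHost, i.e. 'ConditionUploader'), searching the directory given by COND_AUTH_PATH or --authPath before falling back to an interactive prompt. A hedged sketch of creating such an entry (login and password are placeholders):

# Hypothetical example: append a 'ConditionUploader' entry to ~/.netrc and make the
# file user-readable only, since it stores the password as plaintext.
import os
import stat

netrcPath = os.path.expanduser('~/.netrc')
with open(netrcPath, 'a') as netrcFile:
    netrcFile.write('machine ConditionUploader login myuser password mysecret\n')
os.chmod(netrcPath, stat.S_IRUSR | stat.S_IWUSR)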
def uploadConditions.getInput (   default,
  prompt = '' 
)
Like raw_input()/input() but with a default and automatic strip().

Definition at line 61 of file uploadConditions.py.

Referenced by getCredentials(), getInputChoose(), getInputWorkflow(), parse_arguments(), runWizard(), and uploadAllFiles().

61 
62 def getInput(default, prompt = ''):
63  '''Like raw_input() but with a default and automatic strip().
64  '''
65 
66  answer = raw_input(prompt)
67  if answer:
68  return answer.strip()
69 
70  return default.strip()
71 
def uploadConditions.getInputChoose (   optionsList,
  default,
  prompt = '' 
)
Makes the user choose from a list of options.

Definition at line 85 of file uploadConditions.py.

References getInput(), and print().

Referenced by runWizard().

85 
86 def getInputChoose(optionsList, default, prompt = ''):
87  '''Makes the user choose from a list of options.
88  '''
89 
90  while True:
91  index = getInput(default, prompt)
92 
93  try:
94  return optionsList[int(index)]
95  except ValueError:
96  print('Please specify an index of the list (i.e. integer).')
97  except IndexError:
98  print('The index you provided is not in the given list.')
99 
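A hedged usage sketch of the index-based selection loop, mirroring how runWizard() offers the input tags it finds in the SQLite file; the tag names below are placeholders.

# Hypothetical example; assumes the script can be imported as a module.
from uploadConditions import getInputChoose

inputTags = ['BeamSpotObjects_ByRun', 'BeamSpotObjects_ByLumi']
chosen = getInputChoose(inputTags, '0', 'Which is the input tag? [0]: ')
print('Selected input tag: %s' % chosen)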
def uploadConditions.getInputRepeat (   prompt = '')
Like raw_input()/input() but repeats if nothing is provided, and strips the answer automatically.

Definition at line 100 of file uploadConditions.py.

References print().

Referenced by runWizard().

101 def getInputRepeat(prompt = ''):
102  '''Like raw_input() but repeats if nothing is provided and automatic strip().
103  '''
104 
105  while True:
106  answer = raw_input(prompt)
107  if answer:
108  return answer.strip()
109 
110  print('You need to provide a value.')
def uploadConditions.getInputWorkflow (   prompt = '')
Like getInput() but tailored to get target workflows (synchronization options).

Definition at line 72 of file uploadConditions.py.

References getInput(), and print().

72 
73 def getInputWorkflow(prompt = ''):
74  '''Like getInput() but tailored to get target workflows (synchronization options).
75  '''
76 
77  while True:
78  workflow = getInput(defaultWorkflow, prompt)
79 
80  if workflow in frozenset(['offline', 'hlt', 'express', 'prompt', 'pcl']):
81  return workflow
82 
83  print('Please specify one of the allowed workflows. See above for the explanation on each of them.')
84 
def uploadConditions.main ( )
Entry point.

Definition at line 886 of file uploadConditions.py.

References print(), re_upload(), and upload().

887 def main():
888  '''Entry point.
889  '''
890 
891  parser = optparse.OptionParser(usage =
892  'Usage: %prog [options] <file> [<file> ...]\n'
893  )
894 
895  parser.add_option('-d', '--debug',
896  dest = 'debug',
897  action="store_true",
898  default = False,
899  help = 'Switch on printing debug information. Default: %default',
900  )
901 
902  parser.add_option('-b', '--backend',
903  dest = 'backend',
904  default = defaultBackend,
905  help = 'dropBox\'s backend to upload to. Default: %default',
906  )
907 
908  parser.add_option('-H', '--hostname',
909  dest = 'hostname',
910  default = defaultHostname,
911  help = 'dropBox\'s hostname. Default: %default',
912  )
913 
914  parser.add_option('-u', '--urlTemplate',
915  dest = 'urlTemplate',
916  default = defaultUrlTemplate,
917  help = 'dropBox\'s URL template. Default: %default',
918  )
919 
920  parser.add_option('-f', '--temporaryFile',
921  dest = 'temporaryFile',
922  default = defaultTemporaryFile,
923  help = 'Temporary file that will be used to store the first tar file. Note that it then will be moved to a file with the hash of the file as its name, so there will be two temporary files created in fact. Default: %default',
924  )
925 
926  parser.add_option('-n', '--netrcHost',
927  dest = 'netrcHost',
928  default = defaultNetrcHost,
929  help = 'The netrc host (machine) from where the username and password will be read. Default: %default',
930  )
931 
932  parser.add_option('-a', '--authPath',
933  dest = 'authPath',
934  default = None,
935  help = 'The path of the .netrc file for the authentication. Default: $HOME',
936  )
937 
938  parser.add_option('-r', '--reUpload',
939  dest = 'reUpload',
940  default = None,
941  help = 'The hash of the file to upload again.',
942  )
943 
944  (options, arguments) = parser.parse_args()
945 
946  logLevel = logging.INFO
947  if options.debug:
948  logLevel = logging.DEBUG
949  logging.basicConfig(
950  format = '[%(asctime)s] %(levelname)s: %(message)s',
951  level = logLevel,
952  )
953 
954  if len(arguments) < 1:
955  if options.reUpload is None:
956  parser.print_help()
957  return -2
958  else:
959  return re_upload(options)
960  if options.reUpload is not None:
961  print("ERROR: options -r can't be specified on a new file upload.")
962  return -2
963 
964  return upload(options, arguments)
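A hedged sketch of driving main() programmatically instead of from the shell; the file name is a placeholder and a matching 'myfile.txt' metadata file is expected next to it. The equivalent command line would be something like 'uploadConditions.py -b online myfile.db'.

# Hypothetical example; assumes the script can be imported as a module.
import sys
from uploadConditions import main

sys.argv = ['uploadConditions.py', '--backend', 'online', 'myfile.db']
exit_code = main()
print('upload finished with code %s' % exit_code)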
def uploadConditions.parse_arguments ( )

Definition at line 232 of file uploadConditions.py.

References beamvalidation.exit(), getInput(), join(), print(), runWizard(), and str.

233 def parse_arguments():
234  # read in command line arguments, and build metadata dictionary from them
235  parser = argparse.ArgumentParser(prog="cmsDbUpload client", description="CMS Conditions Upload Script in CondDBFW.")
236 
237  parser.add_argument("--sourceDB", type=str, help="DB to find Tags, IOVs + Payloads in.", required=False)
238 
239  # metadata arguments
240  parser.add_argument("--inputTag", type=str,\
241  help="Tag to take IOVs + Payloads from in --sourceDB.", required=False)
242  parser.add_argument("--destinationTag", type=str,\
243  help="Tag to copy IOVs + Payloads to in --destDB.", required=False)
244  parser.add_argument("--destinationDatabase", type=str,\
245  help="Database to copy IOVs + Payloads to.", required=False)
246  parser.add_argument("--since", type=int,\
247  help="Since to take IOVs from.", required=False)
248  parser.add_argument("--userText", type=str,\
249  help="Description of --destTag (can be empty).")
250 
251  # non-metadata arguments
252  parser.add_argument("--metadataFile", "-m", type=str, help="Metadata file to take metadata from.", required=False)
253 
254  parser.add_argument("--debug", required=False, action="store_true")
255  parser.add_argument("--verbose", required=False, action="store_true")
256  parser.add_argument("--testing", required=False, action="store_true")
257  parser.add_argument("--fcsr-filter", type=str, help="Synchronization to take FCSR from for local filtering of IOVs.", required=False)
258 
259  parser.add_argument("--netrc", required=False)
260 
261  parser.add_argument("--hashToUse", required=False)
262 
263  parser.add_argument("--server", required=False)
264 
265  parser.add_argument("--review-options", required=False, action="store_true")
266 
267  parser.add_argument("--replay-file", required=False)
268 
269  command_line_data = parser.parse_args()
270 
271  if command_line_data.replay_file:
272  dictionary = json.loads("".join(open(command_line_data.replay_file, "r").readlines()))
273  command_line_data.tier0_response = dictionary["tier0_response"]
274 
275  # default is the production server, which can point to either database anyway
276  server_alias_to_url = {
277  "prep" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
278  "dev" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
279  "prod" : "https://cms-conddb.cern.ch/cmsDbCondUpload/"
280  }
281 
282  # if prep, prod or None were given, convert to URLs in dictionary server_alias_to_url
283  # if not, assume a URL has been given and use this instead
284  if command_line_data.server in server_alias_to_url.keys():
285  command_line_data.server = server_alias_to_url[command_line_data.server]
286 
287  # resolve destination databases
288  database_alias_to_connection = {
289  "prep": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
290  "dev": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
291  "prod": "oracle://cms_orcon_adg/CMS_CONDITIONS"
292  }
293 
294  if command_line_data.destinationDatabase in database_alias_to_connection.keys():
295  command_line_data.destinationDatabase = database_alias_to_connection[command_line_data.destinationDatabase]
296 
297 
298  # use netrc to get username and password
299  try:
300  netrc_file = command_line_data.netrc
301  netrc_authenticators = netrc.netrc(netrc_file).authenticators("ConditionUploader")
302  if netrc_authenticators == None:
303  print("Your netrc file must contain the key 'ConditionUploader'.")
304  manual_input = raw_input("Do you want to try to type your credentials? ")
305  if manual_input == "y":
306  # ask for username and password
307  username = raw_input("Username: ")
308  password = getpass.getpass("Password: ")
309  else:
310  exit()
311  else:
312  print("Read your credentials from ~/.netrc. If you want to use a different file, supply its name with the --netrc argument.")
313  username = netrc_authenticators[0]
314  password = netrc_authenticators[2]
315  except:
316  print("Couldn't obtain your credentials (either from netrc or manual input).")
317  exit()
318 
319  command_line_data.username = username
320  command_line_data.password = password
321  # this will be used as the final destinationTags value by all input methods
322  # apart from the metadata file
323  command_line_data.destinationTags = {command_line_data.destinationTag:{}}
324 
325  """
326  Construct metadata_dictionary:
327  Currently, this is 3 cases:
328 
329  1) An IOV is being appended to an existing Tag with an existing Payload.
330  In this case, we just take all data from the command line.
331 
332  2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line.
333 
334  3) A metadata file is given, hence we parse the file, and then iterate through command line arguments
335  since these override the options set in the metadata file.
336 
337  """
338 
339  # Hash to use, entirely from command line
340  if command_line_data.hashToUse != None:
341  command_line_data.userText = ""
342  metadata_dictionary = command_line_data.__dict__
343  elif command_line_data.metadataFile == None:
344  if command_line_data.sourceDB != None and (command_line_data.inputTag == None or command_line_data.destinationTag == None or command_line_data.destinationDatabase == None):
345  basepath = command_line_data.sourceDB.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
346  basename = os.path.basename(basepath)
347  dataFilename = '%s.db' % basepath
348  metadataFilename = '%s.txt' % basepath
349  # Data file
350  try:
351  with open(dataFilename, 'rb') as dataFile:
352  pass
353  except IOError as e:
354  errMsg = 'Impossible to open SQLite data file %s' %dataFilename
355  print( errMsg )
356  ret['status'] = -3
357  ret['error'] = errMsg
358  return ret
359 
360  # Metadata file
361 
362  try:
363  with open(metadataFilename, 'rb') as metadataFile:
364  pass
365  except IOError as e:
366  if e.errno != errno.ENOENT:
367  errMsg = 'Impossible to open file %s (for other reason than not existing)' %metadataFilename
368  ret = {}
369  ret['status'] = -4
370  ret['error'] = errMsg
371  exit (ret)
372 
373  if getInput('y', '\nIt looks like the metadata file %s does not exist and not enough parameters were received in the command line. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
374  errMsg = 'Metadata file %s does not exist' %metadataFilename
375  ret = {}
376  ret['status'] = -5
377  ret['error'] = errMsg
378  exit(ret)
379  # Wizard
380  runWizard(basename, dataFilename, metadataFilename)
381  command_line_data.metadataFile = metadataFilename
382  else:
383  command_line_data.userText = command_line_data.userText\
384  if command_line_data.userText != None\
385  else str(raw_input("Tag's description [can be empty]:"))
386  metadata_dictionary = command_line_data.__dict__
387 
388  if command_line_data.metadataFile != None:
389  metadata_dictionary = json.loads("".join(open(os.path.abspath(command_line_data.metadataFile), "r").readlines()))
390  metadata_dictionary["username"] = username
391  metadata_dictionary["password"] = password
392  metadata_dictionary["userText"] = metadata_dictionary.get("userText")\
393  if metadata_dictionary.get("userText") != None\
394  else str(raw_input("Tag's description [can be empty]:"))
395 
396  # go through command line options and, if they are set, overwrite entries
397  for (option_name, option_value) in command_line_data.__dict__.items():
398  # if the metadata_dictionary sets this, overwrite it
399  if option_name != "destinationTags":
400  if option_value != None or (option_value == None and not(option_name in metadata_dictionary.keys())):
401  # if option_value has a value, override the metadata file entry
402  # or if option_value is None but the metadata file doesn't give a value,
403  # set the entry to None as well
404  metadata_dictionary[option_name] = option_value
405  else:
406  if option_value != {None:{}}:
407  metadata_dictionary["destinationTags"] = {option_value:{}}
408  elif option_value == {None:{}} and not("destinationTags" in metadata_dictionary.keys()):
409  metadata_dictionary["destinationTags"] = {None:{}}
410 
411  if command_line_data.review_options:
412  defaults = {
413  "since" : "Since of first IOV",
414  "userText" : "Populated by upload process",
415  "netrc" : "None given",
416  "fcsr_filter" : "Don't apply",
417  "hashToUse" : "Using local SQLite file instead"
418  }
419  print("Configuration to use for the upload:")
420  for key in metadata_dictionary:
421  if not(key) in ["username", "password", "destinationTag"]:
422  value_to_print = metadata_dictionary[key] if metadata_dictionary[key] != None else defaults[key]
423  print("\t%s : %s" % (key, value_to_print))
424 
425  if raw_input("\nDo you want to continue? [y/n] ") != "y":
426  exit()
427 
428  if metadata_dictionary["server"] == None:
429  if metadata_dictionary["destinationDatabase"] == "oracle://cms_orcoff_prep/CMS_CONDITIONS":
430  metadata_dictionary["server"] = server_alias_to_url["prep"]
431  else:
432  metadata_dictionary["server"] = server_alias_to_url["prod"]
433 
434  return metadata_dictionary
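A hedged sketch of a metadata file that could be passed via --metadataFile / -m. The keys mirror the dictionary that runWizard() writes; the tag names, since value and comment are placeholders.

# Hypothetical example metadata file content.
import json

metadata = {
    'destinationDatabase': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
    'destinationTags': {'BeamSpotObjects_PCL_byRun_v0_offline': {}},
    'inputTag': 'BeamSpotObject_ByRun',
    'since': None,
    'userText': 'example upload request',
}
with open('myfile.txt', 'w') as metadataFile:
    json.dump(metadata, metadataFile, sort_keys=True, indent=4)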
def uploadConditions.re_upload (   options)

Definition at line 802 of file uploadConditions.py.

References edm.decode(), SiPixelLorentzAngle_cfi.read, str, and upload().

Referenced by main().

803 def re_upload( options ):
804  netrcPath = None
805  logDbSrv = prodLogDbSrv
806  if options.hostname == defaultDevHostname:
807  logDbSrv = devLogDbSrv
808  if options.authPath is not None:
809  netrcPath = os.path.join( options.authPath,'.netrc' )
810  try:
811  netrcKey = '%s/%s' %(logDbSrv,logDbSchema)
812  # Try to find the netrc entry
813  (username, account, password) = netrc.netrc( netrcPath ).authenticators( netrcKey )
814  except IOError as e:
815  logging.error('Cannot access netrc file.')
816  return 1
817  except Exception as e:
818  logging.error('Netrc file is invalid: %s' %str(e))
819  return 1
820  conStr = '%s/%s@%s' %(username,password,logDbSrv)
821  con = cx_Oracle.connect( conStr )
822  cur = con.cursor()
823  fh = options.reUpload
824  cur.execute('SELECT FILECONTENT, STATE FROM FILES WHERE FILEHASH = :HASH',{'HASH':fh})
825  res = cur.fetchall()
826  found = False
827  fdata = None
828  for r in res:
829  found = True
830  logging.info("Found file %s in state '%s;" %(fh,r[1]))
831  fdata = r[0].read().decode('bz2')
832  con.close()
833  if not found:
834  logging.error("No file uploaded found with hash %s" %fh)
835  return 1
836  # writing as a tar file and opening it (is there a way to open it in memory?)
837  fname = '%s.tar' %fh
838  with open(fname, "wb" ) as f:
839  f.write(fdata)
840  rname = 'reupload_%s' %fh
841  with tarfile.open(fname) as tar:
842  tar.extractall()
843  os.remove(fname)
844  dfile = 'data.db'
845  mdfile = 'metadata.txt'
846  if os.path.exists(dfile):
847  os.utime(dfile,None)
848  os.chmod(dfile,0o755)
849  os.rename(dfile,'%s.db' %rname)
850  else:
851  logging.error('Tar file does not contain the data file')
852  return 1
853  if os.path.exists(mdfile):
854  os.utime(mdfile,None)
855  os.chmod(mdfile,0o755)
856  mdata = None
857  with open(mdfile) as md:
858  mdata = json.load(md)
859  datelabel = datetime.now().strftime("%y-%m-%d %H:%M:%S")
860  if mdata is None:
861  logging.error('Metadata file is empty.')
862  return 1
863  logging.debug('Preparing new metadata file...')
864  mdata['userText'] = 'reupload %s : %s' %(datelabel,mdata['userText'])
865  with open( '%s.txt' %rname, 'wb') as jf:
866  jf.write( json.dumps( mdata, sort_keys=True, indent = 2 ) )
867  jf.write('\n')
868  os.remove(mdfile)
869  else:
870  logging.error('Tar file does not contain the metadata file')
871  return 1
872  logging.info('Files %s prepared for the upload.' %rname)
873  arguments = [rname]
874  return upload(options, arguments)
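re_upload() reads the previously uploaded payload back from the upload log database, so it needs a netrc entry keyed as '<logDbSrv>/<logDbSchema>': 'cms_orcoff_prod/CMS_COND_DROPBOX' for the production hostname, or 'cms_orcoff_prep/CMS_COND_DROPBOX' when --hostname points to the dev server. A hedged sketch of such an entry (login and password are placeholders):

# Hypothetical example: append the log-database entry to a .netrc file (its directory
# can also be given via --authPath).
with open('.netrc', 'a') as netrcFile:
    netrcFile.write('machine cms_orcoff_prod/CMS_COND_DROPBOX login dbuser password dbsecret\n')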
def uploadConditions.run_upload (   parameters)
Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.

Definition at line 47 of file uploadConditions.py.

References beamvalidation.exit().

47 
48 def run_upload(**parameters):
49  """
50  Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.
51  """
52  try:
53  import CondCore.Utilities.CondDBFW.uploads as uploads
54  except Exception as e:
55  traceback.print_exc()
56  exit("CondDBFW or one of its dependencies could not be imported.\n"\
57  + "If the CondDBFW directory exists, you are likely not in a CMSSW environment.")
58  # we have CondDBFW, so just call the module with the parameters given in the command line
59  uploader = uploads.uploader(**parameters)
60  result = uploader.upload()
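A hedged sketch of the intended call chain: the metadata dictionary built by parse_arguments() is passed as keyword arguments to run_upload(), which forwards them to CondDBFW's uploader. This approximates the module-level flow (the script itself filters the dictionary into upload_metadata_argument first); error handling is omitted.

# Hypothetical example; assumes the script can be imported as a module and that a
# CMSSW environment (needed for CondDBFW) is available.
from uploadConditions import parse_arguments, run_upload

upload_metadata = parse_arguments()
run_upload(**upload_metadata)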
def uploadConditions.runWizard (   basename,
  dataFilename,
  metadataFilename 
)

Definition at line 111 of file uploadConditions.py.

References getInput(), getInputChoose(), getInputRepeat(), print(), and ComparisonHelper.zip().

Referenced by parse_arguments(), and uploadAllFiles().

112 def runWizard(basename, dataFilename, metadataFilename):
113  while True:
114  print('''\nWizard for metadata for %s
115 
116 I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename)
117 
118  # Try to get the available inputTags
119  try:
120  dataConnection = sqlite3.connect(dataFilename)
121  dataCursor = dataConnection.cursor()
122  dataCursor.execute('select name from sqlite_master where type == "table"')
123  tables = set(zip(*dataCursor.fetchall())[0])
124 
125  # only conddb V2 supported...
126  if 'TAG' in tables:
127  dataCursor.execute('select NAME from TAG')
128  # In any other case, do not try to get the inputTags
129  else:
130  raise Exception()
131 
132  inputTags = dataCursor.fetchall()
133  if len(inputTags) == 0:
134  raise Exception()
135  inputTags = list(zip(*inputTags))[0]
136 
137  except Exception:
138  inputTags = []
139 
140  if len(inputTags) == 0:
141  print('\nI could not find any input tag in your data file, but you can still specify one manually.')
142 
143  inputTag = getInputRepeat(
144  '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')
145 
146  else:
147  print('\nI found the following input tags in your SQLite data file:')
148  for (index, inputTag) in enumerate(inputTags):
149  print(' %s) %s' % (index, inputTag))
150 
151  inputTag = getInputChoose(inputTags, '0',
152  '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')
153 
154  databases = {
155  'oraprod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
156  'prod': 'oracle://cms_orcon_prod/CMS_CONDITIONS',
157  'oradev': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
158  'prep': 'oracle://cms_orcoff_prep/CMS_CONDITIONS',
159  }
160 
161  destinationDatabase = ''
162  ntry = 0
163  print('\nWhich is the destination database where the tags should be exported?')
164  print('\n%s) %s' % ('oraprod', databases['oraprod']))
165  print('\n%s) %s' % ('oradev', databases['oradev']))
166 
167  while ( destinationDatabase not in databases.values() ):
168  if ntry==0:
169  inputMessage = \
170  '\nPossible choices: oraprod or oradev \ndestinationDatabase: '
171  elif ntry==1:
172  inputMessage = \
173  '\nPlease choose one of the two valid destinations: oraprod or oradev \ndestinationDatabase: '
174  else:
175  raise Exception('No valid destination chosen. Bailing out...')
176 
177  databaseInput = getInputRepeat(inputMessage).lower()
178  if databaseInput in databases.keys():
179  destinationDatabase = databases[databaseInput]
180  ntry += 1
181 
182  while True:
183  since = getInput('',
184  '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
185  if not since:
186  since = None
187  break
188  else:
189  try:
190  since = int(since)
191  break
192  except ValueError:
193  print('The since value has to be an integer or empty (null).')
194 
195  userText = getInput('',
196  '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')
197 
198  destinationTags = {}
199  while True:
200  destinationTag = getInput('',
201  '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
202  if not destinationTag:
203  if len(destinationTags) == 0:
204  print('There must be at least one destination tag.')
205  continue
206  break
207 
208  if destinationTag in destinationTags:
209  print(
210  'You already added this destination tag. Overwriting the previous one with this new one.')
211 
212  destinationTags[destinationTag] = {
213  }
214 
215  metadata = {
216  'destinationDatabase': destinationDatabase,
217  'destinationTags': destinationTags,
218  'inputTag': inputTag,
219  'since': since,
220  'userText': userText,
221  }
222 
223  metadata = json.dumps(metadata, sort_keys=True, indent=4)
224  print('\nThis is the generated metadata:\n%s' % metadata)
225 
226  if getInput('n',
227  '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() == 'y':
228  break
229  print('Saving generated metadata in %s...', metadataFilename)
230  with open(metadataFilename, 'wb') as metadataFile:
231  metadataFile.write(metadata)
def uploadConditions.testTier0Upload ( )

Definition at line 965 of file uploadConditions.py.

References uploadTier0Files().

966 def testTier0Upload():
967 
968  global defaultNetrcHost
969 
970  (username, account, password) = netrc.netrc().authenticators(defaultNetrcHost)
971 
972  filenames = ['testFiles/localSqlite-top2']
973 
974  uploadTier0Files(filenames, username, password, cookieFileName = None)
975 
def uploadConditions.upload (   options,
  arguments 
)

Definition at line 875 of file uploadConditions.py.

References print(), and uploadAllFiles().

Referenced by main(), and re_upload().

876 def upload(options, arguments):
877  results = uploadAllFiles(options, arguments)
878 
879  if 'status' not in results:
880  print('Unexpected error.')
881  return -1
882  ret = results['status']
883  print(results)
884  print("upload ended with code: %s" %ret)
885  return ret
def uploadConditions.uploadAllFiles (   options,
  arguments 
)

Definition at line 647 of file uploadConditions.py.

References getCredentials(), getInput(), runWizard(), and str.

Referenced by upload().

648 def uploadAllFiles(options, arguments):
649 
650  ret = {}
651  ret['status'] = 0
652 
653  # Check that we can read the data and metadata files
654  # If the metadata file does not exist, start the wizard
655  for filename in arguments:
656  basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
657  basename = os.path.basename(basepath)
658  dataFilename = '%s.db' % basepath
659  metadataFilename = '%s.txt' % basepath
660 
661  logging.info('Checking %s...', basename)
662 
663  # Data file
664  try:
665  with open(dataFilename, 'rb') as dataFile:
666  pass
667  except IOError as e:
668  errMsg = 'Impossible to open SQLite data file %s' %dataFilename
669  logging.error( errMsg )
670  ret['status'] = -3
671  ret['error'] = errMsg
672  return ret
673 
674  # Check the data file
675  empty = True
676  try:
677  dbcon = sqlite3.connect( dataFilename )
678  dbcur = dbcon.cursor()
679  dbcur.execute('SELECT * FROM IOV')
680  rows = dbcur.fetchall()
681  for r in rows:
682  empty = False
683  dbcon.close()
684  if empty:
685  errMsg = 'The input SQLite data file %s contains no data.' %dataFilename
686  logging.error( errMsg )
687  ret['status'] = -4
688  ret['error'] = errMsg
689  return ret
690  except Exception as e:
691  errMsg = 'Check on input SQLite data file %s failed: %s' %(dataFilename,str(e))
692  logging.error( errMsg )
693  ret['status'] = -5
694  ret['error'] = errMsg
695  return ret
696 
697  # Metadata file
698  try:
699  with open(metadataFilename, 'rb') as metadataFile:
700  pass
701  except IOError as e:
702  if e.errno != errno.ENOENT:
703  errMsg = 'Impossible to open file %s (for other reason than not existing)' %metadataFilename
704  logging.error( errMsg )
705  ret['status'] = -4
706  ret['error'] = errMsg
707  return ret
708 
709  if getInput('y', '\nIt looks like the metadata file %s does not exist. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
710  errMsg = 'Metadata file %s does not exist' %metadataFilename
711  logging.error( errMsg )
712  ret['status'] = -5
713  ret['error'] = errMsg
714  return ret
715  # Wizard
716  runWizard(basename, dataFilename, metadataFilename)
717 
718  # Upload files
719  try:
720  dropBox = ConditionsUploader(options.hostname, options.urlTemplate)
721 
722  # Authentication
723  username, password = getCredentials(options)
724 
725  results = {}
726  for filename in arguments:
727  backend = options.backend
728  basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
729  metadataFilename = '%s.txt' % basepath
730  with open(metadataFilename, 'rb') as metadataFile:
731  metadata = json.load( metadataFile )
732  # When dest db = prep the hostname has to be set to dev.
733  forceHost = False
734  destDb = metadata['destinationDatabase']
735  if destDb.startswith('oracle://cms_orcon_prod') or destDb.startswith('oracle://cms_orcoff_prep'):
736  hostName = defaultHostname
737  if destDb.startswith('oracle://cms_orcoff_prep'):
738  hostName = defaultDevHostname
739  dropBox.setHost( hostName )
740  authRet = dropBox.signIn( username, password )
741  if not authRet==0:
742  msg = "Error trying to connect to the server. Aborting."
743  if authRet==-2:
744  msg = "Error while signin in. Aborting."
745  logging.error(msg)
746  return { 'status' : authRet, 'error' : msg }
747  results[filename] = dropBox.uploadFile(filename, options.backend, options.temporaryFile)
748  else:
749  results[filename] = False
750  logging.error("DestinationDatabase %s is not valid. Skipping the upload." %destDb)
751  if not results[filename]:
752  if ret['status']<0:
753  ret['status'] = 0
754  ret['status'] += 1
755  ret['files'] = results
756  logging.debug("all files processed, logging out now.")
757 
758  dropBox.signOut()
759 
760  except HTTPError as e:
761  logging.error('got HTTP error: %s', str(e))
762  return { 'status' : -1, 'error' : str(e) }
763 
764  return ret
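uploadAllFiles() derives a '<base>.db' data file and a '<base>.txt' metadata file from every argument, so both must exist (otherwise the wizard is started for the missing metadata). A hedged pre-flight sketch with a placeholder file name:

# Hypothetical example: check the .db/.txt pairing before attempting an upload.
import os

for filename in ['myfile.db']:
    basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
    for suffix in ('.db', '.txt'):
        if not os.path.exists(basepath + suffix):
            print('Missing %s%s' % (basepath, suffix))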
def uploadConditions.uploadTier0Files (   filenames,
  username,
  password,
  cookieFileName = None 
)
Uploads a bunch of files coming from Tier0.
This has the following requirements:
    * Username/Password based authentication.
    * Uses the online backend.
    * Ignores errors related to the upload/content (e.g. duplicated file).

Definition at line 765 of file uploadConditions.py.

Referenced by testTier0Upload().

766 def uploadTier0Files(filenames, username, password, cookieFileName = None):
767  '''Uploads a bunch of files coming from Tier0.
768  This has the following requirements:
769  * Username/Password based authentication.
770  * Uses the online backend.
771  * Ignores errors related to the upload/content (e.g. duplicated file).
772  '''
773 
774  dropBox = ConditionsUploader()
775 
776  dropBox.signIn(username, password)
777 
778  for filename in filenames:
779  try:
780  result = dropBox.uploadFile(filename, backend = 'test')
781  except HTTPError as e:
782  if e.code == 400:
783  # 400 Bad Request: This is an exception related to the upload
784  # being wrong for some reason (e.g. duplicated file).
785  # Since for Tier0 this is not an issue, continue
786  logging.error('HTTP Exception 400 Bad Request: Upload-related, skipping. Message: %s', e)
787  continue
788 
789  # In any other case, re-raise.
790  raise
791 
792  #-toDo: add a flag to say if we should retry or not. So far, all retries are done server-side (Tier-0),
793  # if we flag as failed any retry would not help and would result in the same error (e.g.
794  # when a file with an identical hash is uploaded again)
795  #-review(2015-09-25): get feedback from tests at Tier-0 (action: AP)
796 
797  if not result: # dropbox reported an error when uploading, do not retry.
798  logging.error('Error from dropbox, upload-related, skipping.')
799  continue
800 
801  dropBox.signOut()

Variable Documentation

string uploadConditions.__author__ = 'Andreas Pfeiffer'

Definition at line 7 of file uploadConditions.py.

string uploadConditions.__copyright__ = 'Copyright 2015, CERN CMS'

Definition at line 8 of file uploadConditions.py.

list uploadConditions.__credits__ = ['Giacomo Govi', 'Salvatore Di Guida', 'Miguel Ojeda', 'Andreas Pfeiffer']

Definition at line 9 of file uploadConditions.py.

string uploadConditions.__email__ = 'giacomo.govi@cern.ch'

Definition at line 12 of file uploadConditions.py.

string uploadConditions.__license__ = 'Unknown'

Definition at line 10 of file uploadConditions.py.

string uploadConditions.__maintainer__ = 'Giacomo Govi'

Definition at line 11 of file uploadConditions.py.

int uploadConditions.__version__ = 1

Definition at line 22 of file uploadConditions.py.

string uploadConditions.authPathEnvVar = 'COND_AUTH_PATH'

Definition at line 40 of file uploadConditions.py.

string uploadConditions.CERN_SSO_CURL_CAPATH = '/etc/pki/tls/certs'

Definition at line 218 of file uploadConditions.py.

string uploadConditions.defaultBackend = 'online'

Definition at line 30 of file uploadConditions.py.

string uploadConditions.defaultDevHostname = 'cms-conddb-dev.cern.ch'

Definition at line 32 of file uploadConditions.py.

string uploadConditions.defaultHostname = 'cms-conddb-prod.cern.ch'

Definition at line 31 of file uploadConditions.py.

string uploadConditions.defaultNetrcHost = 'ConditionUploader'

Definition at line 35 of file uploadConditions.py.

string uploadConditions.defaultTemporaryFile = 'upload.tar.bz2'

Definition at line 34 of file uploadConditions.py.

string uploadConditions.defaultUrlTemplate = 'https://%s/cmsDbUpload/'

Definition at line 33 of file uploadConditions.py.

string uploadConditions.defaultWorkflow = 'offline'

Definition at line 36 of file uploadConditions.py.

string uploadConditions.devLogDbSrv = 'cms_orcoff_prep'

Definition at line 38 of file uploadConditions.py.

list uploadConditions.final_service_url = upload_metadata["server"]

Definition at line 444 of file uploadConditions.py.

string uploadConditions.horizontal_rule = "="

Definition at line 45 of file uploadConditions.py.

string uploadConditions.logDbSchema = 'CMS_COND_DROPBOX'

Definition at line 39 of file uploadConditions.py.

string uploadConditions.prodLogDbSrv = 'cms_orcoff_prod'

Definition at line 37 of file uploadConditions.py.

tuple uploadConditions.response = get_version(final_service_url)

Definition at line 446 of file uploadConditions.py.


tuple uploadConditions.server_version = response.json()

Definition at line 447 of file uploadConditions.py.


tuple uploadConditions.upload_metadata = parse_arguments()

Definition at line 441 of file uploadConditions.py.

dictionary uploadConditions.upload_metadata_argument = {}

Definition at line 473 of file uploadConditions.py.

int uploadConditions.waitForRetry = 15

Definition at line 41 of file uploadConditions.py.
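The module-level variables documented above are produced by the script's top-level flow. A hedged, simplified sketch of how they fit together (the version comparison, interactive review and error handling are omitted, and the script actually filters the metadata into upload_metadata_argument before uploading):

# Hypothetical, simplified top-level flow inside uploadConditions.py.
upload_metadata = parse_arguments()
final_service_url = upload_metadata["server"]
response = get_version(final_service_url)
server_version = response.json()
run_upload(**upload_metadata)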