'''Script that uploads to the new CMS conditions uploader.
Adapted to the new infrastructure from v6 of the upload.py script for the DropBox from Miguel Ojeda.
'''
from __future__ import print_function

__author__ = 'Andreas Pfeiffer'
__copyright__ = 'Copyright 2015, CERN CMS'
__credits__ = ['Giacomo Govi', 'Salvatore Di Guida', 'Miguel Ojeda', 'Andreas Pfeiffer']
__license__ = 'Unknown'
__maintainer__ = 'Giacomo Govi'
__email__ = 'giacomo.govi@cern.ch'

import os
import sys
import optparse
import hashlib
import tarfile
import netrc
import getpass
import errno
import sqlite3
import cStringIO
import json
import tempfile
import socket
import logging
import copy
import time
import pycurl
import cx_Oracle

from datetime import datetime
defaultBackend = 'online'
defaultHostname = 'cms-conddb-prod.cern.ch'
defaultDevHostname = 'cms-conddb-dev.cern.ch'
defaultUrlTemplate = 'https://%s/cmsDbUpload/'
defaultTemporaryFile = 'upload.tar.bz2'
defaultNetrcHost = 'ConditionUploader'
defaultWorkflow = 'offline'
prodLogDbSrv = 'cms_orcoff_prod'
devLogDbSrv = 'cms_orcoff_prep'
logDbSchema = 'CMS_COND_DROPBOX'
authPathEnvVar = 'COND_AUTH_PATH'
waitForRetry = 15   # seconds to wait before retrying the token request (assumed default)


def getInput(default, prompt = ''):
    '''Like raw_input() but with a default and automatic strip().
    '''
    answer = raw_input(prompt)
    if answer:
        return answer.strip()
    return default.strip()
def getInputWorkflow(prompt = ''):
    '''Like getInput() but tailored to get target workflows (synchronization options).
    '''
    while True:
        workflow = getInput(defaultWorkflow, prompt)
        if workflow in frozenset(['offline', 'hlt', 'express', 'prompt', 'pcl']):
            break
        logging.error('Please specify one of the allowed workflows. See above for the explanation on each of them.')
    return workflow
def getInputChoose(optionsList, default, prompt = ''):
    '''Makes the user choose from a list of options.
    '''
    while True:
        index = getInput(default, prompt)
        try:
            return optionsList[int(index)]
        except ValueError:
            logging.error('Please specify an index of the list (i.e. integer).')
        except IndexError:
            logging.error('The index you provided is not in the given list.')
def getInputRepeat(prompt = ''):
    '''Like raw_input() but repeats if nothing is provided and automatic strip().
    '''
    while True:
        answer = raw_input(prompt)
        if answer:
            return answer.strip()
        logging.error('You need to provide a value.')
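# Illustrative sketch (not part of the original script): how the three input
# helpers above are typically combined. The prompt texts and the candidate
# list are made-up placeholders.
def _exampleInputHelpers():
    tag = getInputChoose(['TagA', 'TagB'], '0', 'Pick a tag [0]: ')    # choose by index, '0' is the default
    workflow = getInputWorkflow('Synchronization target [offline]: ')  # restricted to the allowed workflows
    comment = getInputRepeat('Comment (cannot be empty): ')            # repeats until something is typed
    return tag, workflow, comment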
def runWizard(basename, dataFilename, metadataFilename):
    while True:
        print('''\nWizard for metadata for %s

I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename)

        # Try to get the available inputTags from the SQLite data file
        try:
            dataConnection = sqlite3.connect(dataFilename)
            dataCursor = dataConnection.cursor()
            dataCursor.execute('select name from sqlite_master where type == "table"')
            tables = set(zip(*dataCursor.fetchall())[0])

            if 'TAG' in tables:
                dataCursor.execute('select NAME from TAG')
            else:
                raise Exception()

            inputTags = dataCursor.fetchall()
            if len(inputTags) == 0:
                raise Exception()
            inputTags = zip(*inputTags)[0]

        except Exception:
            inputTags = []

        if len(inputTags) == 0:
            print('\nI could not find any input tag in your data file, but you can still specify one manually.')

            inputTag = getInputRepeat(
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')

        else:
            print('\nI found the following input tags in your SQLite data file:')
            for (index, inputTag) in enumerate(inputTags):
                print('   %s) %s' % (index, inputTag))

            inputTag = getInputChoose(inputTags, '0',
                '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')

        destinationDatabase = ''
        ntry = 0
        while ( destinationDatabase != 'oracle://cms_orcon_prod/CMS_CONDITIONS' and destinationDatabase != 'oracle://cms_orcoff_prep/CMS_CONDITIONS' ):
            if ntry == 0:
                inputMessage = '\nWhich is the destination database where the tags should be exported? \nPossible choices: oracle://cms_orcon_prod/CMS_CONDITIONS (for prod) or oracle://cms_orcoff_prep/CMS_CONDITIONS (for prep) \ndestinationDatabase: '
            elif ntry == 1:
                inputMessage = '\nPlease choose one of the two valid destinations: \noracle://cms_orcon_prod/CMS_CONDITIONS (for prod) or oracle://cms_orcoff_prep/CMS_CONDITIONS (for prep) \ndestinationDatabase: '
            else:
                raise Exception('No valid destination chosen. Bailing out...')
            destinationDatabase = getInputRepeat(inputMessage)
            ntry += 1

        while True:
            since = getInput('',
                '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
            if not since:
                since = None
                break
            try:
                since = int(since)
                break
            except ValueError:
                logging.error('The since value has to be an integer or empty (null).')

        userText = getInput('',
            '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')

        destinationTags = {}
        while True:
            destinationTag = getInput('',
                '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
            if not destinationTag:
                if len(destinationTags) == 0:
                    logging.error('There must be at least one destination tag.')
                    continue
                break

            if destinationTag in destinationTags:
                logging.warning(
                    'You already added this destination tag. Overwriting the previous one with this new one.')

            destinationTags[destinationTag] = {
            }

        metadata = {
            'destinationDatabase': destinationDatabase,
            'destinationTags': destinationTags,
            'inputTag': inputTag,
            'since': since,
            'userText': userText,
        }

        metadata = json.dumps(metadata, sort_keys=True, indent=4)
        print('\nThis is the generated metadata:\n%s' % metadata)

        if getInput('n',
            '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() == 'y':
            break

    logging.info('Saving generated metadata in %s...', metadataFilename)
    with open(metadataFilename, 'wb') as metadataFile:
        metadataFile.write(metadata)
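# Illustrative sketch (not part of the original script): the kind of JSON the
# wizard above writes next to the SQLite file. The tag and database names are
# made-up placeholders; only the key layout reflects what runWizard produces.
_exampleWizardMetadata = '''
{
    "destinationDatabase": "oracle://cms_orcoff_prep/CMS_CONDITIONS",
    "destinationTags": {
        "BeamSpotObjects_PCL_byRun_v0_offline": {}
    },
    "inputTag": "BeamSpotObject_ByRun",
    "since": 1234,
    "userText": "example upload request"
}
'''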
class HTTPError(Exception):
    '''A common HTTP exception.

    self.code is the response HTTP code as an integer.
    self.response is the response body (i.e. page).
    '''

    def __init__(self, code, response):
        self.code = code
        self.response = response

        # Try to extract the error message if the response has the known error page format
        try:
            self.args = (response.split('<p>')[1].split('</p>')[0], )
        except Exception:
            self.args = (self.response, )
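# Illustrative sketch (not part of the original script): how callers can react
# to HTTPError raised by the HTTP helper defined below; the endpoint name is a
# placeholder.
def _exampleHandleHTTPError(http):
    try:
        return http.query('someEndpoint')
    except HTTPError as e:
        logging.error('Server returned HTTP %s: %s', e.code, e.response)
        return None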
CERN_SSO_CURL_CAPATH = '/etc/pki/tls/certs'


class HTTP(object):
    '''Class used for querying URLs using the HTTP protocol.
    '''

    retryCodes = frozenset([502, 503])

    def __init__(self):
        self.setBaseUrl()
        self.setRetries()

        self.curl = pycurl.Curl()
        self.curl.setopt(self.curl.COOKIEFILE, '')      # in-memory cookie jar

        self.curl.setopt(self.curl.SSL_VERIFYPEER, 0)
        self.curl.setopt(self.curl.SSL_VERIFYHOST, 2)

        self.baseUrl = None
        self.token = None

    def getCookies(self):
        '''Returns the list of cookies.
        '''
        return self.curl.getinfo(self.curl.INFO_COOKIELIST)

    def discardCookies(self):
        '''Discards the cookies collected so far.
        '''
        self.curl.setopt(self.curl.COOKIELIST, 'ALL')

    def setBaseUrl(self, baseUrl = ''):
        '''Allows to set a base URL which will be prefixed to all the URLs
        that will be queried later.
        '''
        self.baseUrl = baseUrl

    def setProxy(self, proxy = ''):
        '''Allows to set a proxy.
        '''
        self.curl.setopt(self.curl.PROXY, proxy)

    def setTimeout(self, timeout = 0):
        '''Allows to set a timeout.
        '''
        self.curl.setopt(self.curl.TIMEOUT, timeout)
    def setRetries(self, retries = ()):
        '''Allows to set retries.

        The retries are a sequence of the seconds to wait per retry.

        The retries are done on:
            * PyCurl errors (includes network problems, e.g. not being able
              to connect to the host).
            * 502 Bad Gateway (for the moment, to avoid temporary
              Apache-CherryPy issues).
            * 503 Service Temporarily Unavailable (for when we update
              the server).
        '''
        self.retries = retries

    def getToken(self, username, password):

        url = self.baseUrl + 'token'

        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.VERBOSE, 0)

        self.curl.setopt(pycurl.HTTPHEADER, ['Accept: application/json'])
        self.curl.setopt(self.curl.HTTPGET, 0)

        response = cStringIO.StringIO()
        self.curl.setopt(pycurl.WRITEFUNCTION, response.write)
        self.curl.setopt(pycurl.USERPWD, '%s:%s' % (username, password) )
        logging.debug('going to connect to server at: %s' % url )

        self.curl.perform()
        code = self.curl.getinfo(pycurl.RESPONSE_CODE)
        logging.debug('got: %s ', str(code))

        if code in ( 502, 503, 504 ):
            logging.debug('Trying again after %d seconds...', waitForRetry)
            time.sleep( waitForRetry )
            response = cStringIO.StringIO()
            self.curl.setopt(pycurl.WRITEFUNCTION, response.write)
            self.curl.setopt(pycurl.USERPWD, '%s:%s' % (username, password) )
            self.curl.perform()
            code = self.curl.getinfo(pycurl.RESPONSE_CODE)

        resp = response.getvalue()
        errorMsg = None

        if code == 500 and not resp.find("INVALID_CREDENTIALS") == -1:
            logging.error("Invalid credentials provided.")
            return None

        if code == 403 and not resp.find("Unauthorized access") == -1:
            logging.error("Unauthorized access. Please check the membership of group 'cms-cond-dropbox'")
            return None

        if code == 200:
            try:
                self.token = json.loads( resp )['token']
            except Exception as e:
                errorMsg = 'Error while decoding returned json string'
                logging.debug('http::getToken> error while decoding json: %s ', str(resp) )
                logging.debug("error getting token: %s", str(e))
                resp = None
        else:
            errorMsg = 'HTTP Error code %s ' % code
            logging.debug('got: %s ', str(code))
            logging.debug('http::getToken> got error from server: %s ', str(resp) )
            resp = None

        if resp is None:
            raise Exception(errorMsg)

        logging.debug('token: %s', self.token)
        logging.debug('returning: %s', response.getvalue())

        return response.getvalue()
    def query(self, url, data = None, files = None, keepCookies = True):
        '''Queries a URL, optionally with some data (dictionary).

        If no data is specified, a GET request will be used.
        If some data is specified, a POST request will be used.

        If files is specified, it must be a dictionary like data but
        the values are filenames.

        By default, cookies are kept in-between requests.

        An HTTPError exception is raised if the response's HTTP code is not 200.
        '''

        if not keepCookies:
            self.discardCookies()

        url = self.baseUrl + url

        # make sure passwords never end up in the logs
        data4log = copy.copy(data)
        if data4log:
            if 'password' in data4log.keys():
                data4log['password'] = '*'

        retries = [0] + list(self.retries)

        while True:
            logging.debug('Querying %s with data %s and files %s (retries left: %s, current sleep: %s)...', url, data4log, files, len(retries), retries[0])

            time.sleep(retries.pop(0))

            try:
                self.curl.setopt(self.curl.URL, url)
                self.curl.setopt(self.curl.HTTPGET, 1)

                # from now on, authenticate with the token obtained at sign-in
                self.curl.setopt(pycurl.USERPWD, '%s:""' % ( str(self.token), ) )
                self.curl.setopt(pycurl.HTTPHEADER, ['Accept: application/json'])

                if data is not None or files is not None:
                    # if there is data or files to send, use a POST request
                    finalData = {}

                    if data is not None:
                        finalData.update(data)

                    if files is not None:
                        for (key, fileName) in files.items():
                            finalData[key] = (self.curl.FORM_FILE, fileName)
                    self.curl.setopt( self.curl.HTTPPOST, finalData.items() )

                self.curl.setopt(pycurl.VERBOSE, 0)

                response = cStringIO.StringIO()
                self.curl.setopt(self.curl.WRITEFUNCTION, response.write)
                self.curl.perform()

                code = self.curl.getinfo(self.curl.RESPONSE_CODE)

                if code in self.retryCodes and len(retries) > 0:
                    logging.debug('Retrying since we got the %s error code...', code)
                    continue

                if code != 200:
                    raise HTTPError(code, response.getvalue())

                return response.getvalue()

            except pycurl.error as e:
                if len(retries) == 0:
                    raise
                logging.debug('Retrying since we got the %s pycurl exception...', str(e))
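# Illustrative sketch (not part of the original script): minimal use of the
# HTTP helper above. The endpoint name is a placeholder; getToken() must have
# been called first so that query() can authenticate with the token.
def _exampleQuery(username, password):
    http = HTTP()
    http.setBaseUrl('https://cms-conddb-dev.cern.ch/cmsDbUpload/')
    http.setRetries([5, 15, 30])        # wait 5 s, 15 s and 30 s between retries
    http.getToken(username, password)
    return http.query('someEndpoint')   # GET; pass data=... to send a POST instead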
def addToTarFile(tarFile, fileobj, arcname):
    tarInfo = tarFile.gettarinfo(fileobj = fileobj, arcname = arcname)
    tarInfo.uid = tarInfo.gid = tarInfo.mtime = 0
    tarInfo.uname = tarInfo.gname = 'root'
    tarFile.addfile(tarInfo, fileobj)
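# Illustrative sketch (not part of the original script): addToTarFile() zeroes
# uid/gid/mtime and fixes the owner names so that two tarballs built from the
# same payload are byte-identical, which keeps the SHA1 used later as the
# upload name reproducible. The file names below are placeholders.
def _exampleDeterministicTar(payloadPath = 'example.db'):
    with tarfile.open('example.tar.bz2', 'w:bz2') as tar:
        with open(payloadPath, 'rb') as data:
            addToTarFile(tar, data, 'data.db')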
class ConditionsUploader(object):
    '''Upload conditions to the CMS conditions uploader service.
    '''

    def __init__(self, hostname = defaultHostname, urlTemplate = defaultUrlTemplate):
        self.hostname = hostname
        self.urlTemplate = urlTemplate
        self.http = None
        self.token = None

    def setHost(self, hostname):
        if not hostname == self.hostname:
            self.token = None
        self.hostname = hostname

    def signIn(self, username, password):
        '''Signs in the server.
        '''
        if self.token is None:
            logging.debug("Initializing connection with server %s", self.hostname)
            self.http = HTTP()
            if socket.getfqdn().strip().endswith('.cms'):
                self.http.setProxy('https://cmsproxy.cms:3128/')
            self.http.setBaseUrl(self.urlTemplate % self.hostname)

            logging.info('%s: Signing in user %s ...', self.hostname, username)
            try:
                self.token = self.http.getToken(username, password)
            except Exception as e:
                logging.error("Caught exception when trying to connect to %s: %s" % (self.hostname, str(e)) )
                return -1

            if not self.token:
                logging.error("could not get token for user %s from %s" % (username, self.hostname) )
                return -2

            logging.debug("got: '%s'", str(self.token) )
        else:
            logging.debug("User %s has been already authenticated." % username)

        return 0

    def signOut(self):
        '''Signs out the server.
        '''
        logging.info('%s: Signing out...', self.hostname)
        self.token = None

    def _checkForUpdates(self):
        '''Updates this script, if a new version is found.
        '''
        logging.debug('%s: Checking if a newer version of this script is available ...', self.hostname)
        version = int(self.http.query('getUploadScriptVersion'))

        if version <= __version__:
            logging.debug('%s: Script is up-to-date.', self.hostname)
            return

        logging.info('%s: Updating to a newer version (%s) than the current one (%s): downloading ...', self.hostname, version, __version__)

        uploadScript = self.http.query('getUploadScript')

        logging.info('%s: ... saving the new version ...', self.hostname)
        with open(sys.argv[0], 'wb') as f:
            f.write(uploadScript)

        logging.info('%s: ... executing the new version...', self.hostname)
        os.execl(sys.executable, *([sys.executable] + sys.argv))
    def uploadFile(self, filename, backend = defaultBackend, temporaryFile = defaultTemporaryFile):
        '''Uploads a file to the dropBox.

        The filename can be without extension, with .db or with .txt extension.
        It will be stripped and then both .db and .txt files are used.
        '''

        basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
        basename = os.path.basename(basepath)

        logging.debug('%s: %s: Creating tar file for upload ...', self.hostname, basename)

        try:
            tarFile = tarfile.open(temporaryFile, 'w:bz2')

            with open('%s.db' % basepath, 'rb') as data:
                addToTarFile(tarFile, data, 'data.db')
        except Exception as e:
            msg = 'Error when creating tar file. \n'
            msg += 'Please check that you have write access to the directory you are running,\n'
            msg += 'and that you have enough space on this disk (df -h .)\n'
            logging.error(msg)
            raise Exception(msg)

        with tempfile.NamedTemporaryFile() as metadata:
            with open('%s.txt' % basepath, 'rb') as originalMetadata:
                json.dump(json.load(originalMetadata), metadata, sort_keys = True, indent = 4)

            metadata.seek(0)
            addToTarFile(tarFile, metadata, 'metadata.txt')

        tarFile.close()

        logging.debug('%s: %s: Calculating hash...', self.hostname, basename)

        fileHash = hashlib.sha1()
        with open(temporaryFile, 'rb') as f:
            while True:
                data = f.read(4 * 1024 * 1024)
                if not data:
                    break
                fileHash.update(data)

        fileHash = fileHash.hexdigest()
        fileInfo = os.stat(temporaryFile)
        fileSize = fileInfo.st_size

        logging.debug('%s: %s: Hash: %s', self.hostname, basename, fileHash)

        logging.info('%s: %s: Uploading file (%s, size %s) to the %s backend...', self.hostname, basename, fileHash, fileSize, backend)
        os.rename(temporaryFile, fileHash)
        try:
            ret = self.http.query('uploadFile',
                                  {
                                      'backend': backend,
                                      'fileName': basename,
                                  },
                                  files = {
                                      'uploadedFile': fileHash,
                                  })
        except Exception as e:
            logging.error('Error from uploading: %s' % str(e))
            ret = json.dumps( { "status": -1, "upload" : {'itemStatus' : { basename : {'status': 'failed', 'info': str(e)}}}, "error" : str(e)} )

        statusInfo = json.loads(ret)['upload']
        logging.debug('upload returned: %s', statusInfo )

        okTags      = []
        skippedTags = []
        failedTags  = []
        for tag, info in statusInfo['itemStatus'].items():
            logging.debug('checking tag %s, info %s', tag, str(json.dumps(info, indent=4, sort_keys=True)) )
            if 'ok' in info['status'].lower() :
                okTags.append( tag )
                logging.info('tag %s successfully uploaded', tag)
            if 'skip' in info['status'].lower() :
                skippedTags.append( tag )
                logging.warning('found tag %s to be skipped. reason: \n ... \t%s ', tag, info['info'])
            if 'fail' in info['status'].lower() :
                failedTags.append( tag )
                logging.error('found tag %s failed to upload. reason: \n ... \t%s ', tag, info['info'])

        if len(okTags)      > 0: logging.info   ("tags successfully uploaded: %s ", str(okTags) )
        if len(skippedTags) > 0: logging.warning("tags SKIPped to upload    : %s ", str(skippedTags) )
        if len(failedTags)  > 0: logging.error  ("tags FAILed to upload     : %s ", str(failedTags) )

        fileLogURL = 'https://%s/logs/dropBox/getFileLog?fileHash=%s'
        logging.info('file log at: %s', fileLogURL % (self.hostname, fileHash))

        return len(failedTags) == 0
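# Illustrative sketch (not part of the original script): computing the same
# chunked SHA1 that uploadFile() uses to name the tarball before the upload.
# The path below is a placeholder.
def _exampleUploadHash(path = 'upload.tar.bz2'):
    fileHash = hashlib.sha1()
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(4 * 1024 * 1024)   # 4 MB chunks, as in uploadFile()
            if not chunk:
                break
            fileHash.update(chunk)
    return fileHash.hexdigest()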
def getCredentials( options ):

    username = None
    password = None
    netrcPath = None
    if authPathEnvVar in os.environ:
        authPath = os.environ[authPathEnvVar]
        netrcPath = os.path.join(authPath, '.netrc')
    if options.authPath is not None:
        netrcPath = os.path.join( options.authPath, '.netrc' )
    try:
        # Try to find the netrc entry
        (username, account, password) = netrc.netrc( netrcPath ).authenticators(options.netrcHost)
    except Exception:
        # netrc entry not found, ask for the username and password
        logging.info(
            'netrc entry "%s" not found: if you wish not to have to retype your password, you can add an entry in your .netrc file. However, beware of the risks of having your password stored as plaintext.',
            options.netrcHost)

        # Try to get a default username
        defaultUsername = getpass.getuser()
        if defaultUsername is None:
            defaultUsername = '(not found)'

        username = getInput(defaultUsername, '\nUsername [%s]: ' % defaultUsername)
        password = getpass.getpass('Password: ')

    return username, password
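# Illustrative sketch (not part of the original script): a ~/.netrc entry that
# getCredentials() above would pick up for the default netrc host. The login
# and password are placeholders; keep the file readable only by you
# (chmod 600 ~/.netrc).
#
#   machine ConditionUploader
#     login mycmsuser
#     password mysecret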
def uploadAllFiles(options, arguments):

    ret = {}
    ret['status'] = 0

    # 1) check the input files
    for filename in arguments:
        basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
        basename = os.path.basename(basepath)
        dataFilename = '%s.db' % basepath
        metadataFilename = '%s.txt' % basepath

        logging.info('Checking %s...', basename)

        # check that the data file exists and is readable
        try:
            with open(dataFilename, 'rb') as dataFile:
                pass
        except IOError as e:
            errMsg = 'Impossible to open SQLite data file %s' %dataFilename
            logging.error( errMsg )
            ret['status'] = -3
            ret['error'] = errMsg
            return ret

        # check that the data file contains at least one IOV
        try:
            dbcon = sqlite3.connect( dataFilename )
            dbcur = dbcon.cursor()
            dbcur.execute('SELECT * FROM IOV')
            rows = dbcur.fetchall()
            if len(rows) == 0:
                errMsg = 'The input SQLite data file %s contains no data.' %dataFilename
                logging.error( errMsg )
                ret['status'] = -4
                ret['error'] = errMsg
                return ret
        except Exception as e:
            errMsg = 'Check on input SQLite data file %s failed: %s' %(dataFilename, str(e))
            logging.error( errMsg )
            ret['status'] = -4
            ret['error'] = errMsg
            return ret

        # check that the metadata file exists and is readable
        try:
            with open(metadataFilename, 'rb') as metadataFile:
                pass
        except IOError as e:
            if e.errno != errno.ENOENT:
                errMsg = 'Impossible to open file %s (for other reason than not existing)' %metadataFilename
                logging.error( errMsg )
                ret['status'] = -4
                ret['error'] = errMsg
                return ret

            if getInput('y', '\nIt looks like the metadata file %s does not exist. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() != 'y':
                errMsg = 'Metadata file %s does not exist' %metadataFilename
                logging.error( errMsg )
                ret['status'] = -5
                ret['error'] = errMsg
                return ret
            # metadata file is missing: start the wizard
            runWizard(basename, dataFilename, metadataFilename)
    # 2) upload the files
    try:
        dropBox = ConditionsUploader(options.hostname, options.urlTemplate)

        # authentication
        username, password = getCredentials(options)

        results = {}
        for filename in arguments:
            backend = options.backend
            basepath = filename.rsplit('.db', 1)[0].rsplit('.txt', 1)[0]
            metadataFilename = '%s.txt' % basepath
            with open(metadataFilename, 'rb') as metadataFile:
                metadata = json.load( metadataFile )

            # when the destination database is the prep one, the dev host has to be used
            destDb = metadata['destinationDatabase']
            if destDb.startswith('oracle://cms_orcon_prod') or destDb.startswith('oracle://cms_orcoff_prep'):
                hostName = defaultHostname
                if destDb.startswith('oracle://cms_orcoff_prep'):
                    hostName = defaultDevHostname
                dropBox.setHost( hostName )
                authRet = dropBox.signIn( username, password )
                if authRet != 0:
                    msg = "Error trying to connect to the server. Aborting."
                    if authRet == -2:
                        msg = "Error while signing in. Aborting."
                    logging.error(msg)
                    return { 'status' : authRet, 'error' : msg }
                results[filename] = dropBox.uploadFile(filename, options.backend, options.temporaryFile)
            else:
                results[filename] = False
                logging.error("DestinationDatabase %s is not valid. Skipping the upload." %destDb)

            if not results[filename]:
                ret['status'] = -1

        ret['files'] = results
        logging.debug("all files processed, logging out now.")

        dropBox.signOut()

    except HTTPError as e:
        logging.error('got HTTP error: %s', str(e))
        return { 'status' : -1, 'error' : str(e) }

    return ret
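# Illustrative sketch (not part of the original script): the shape of the
# dictionary returned by uploadAllFiles(), as read by upload() further below.
# The file name is a placeholder.
_exampleUploadAllFilesResult = {
    'status': 0,                          # 0 on success, negative on failure (an 'error' message is added then)
    'files': {'myconditions.db': True},   # per-file outcome of uploadFile()
}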
def uploadTier0Files(filenames, username, password, cookieFileName = None):
    '''Uploads a bunch of files coming from Tier0.

    This has the following requirements:
        * Username/Password based authentication.
        * Uses the online backend.
        * Ignores errors related to the upload/content (e.g. duplicated file).
    '''

    dropBox = ConditionsUploader()

    dropBox.signIn(username, password)

    for filename in filenames:
        try:
            result = dropBox.uploadFile(filename, backend = 'test')
        except HTTPError as e:
            if e.code == 400:
                # 400 Bad Request: upload-related error (e.g. duplicated file);
                # not an issue for Tier0, so continue with the next file
                logging.error('HTTP Exception 400 Bad Request: Upload-related, skipping. Message: %s', e)
                continue
            # in any other case, re-raise
            raise

        if not result:
            logging.error('Error from dropbox, upload-related, skipping.')
            continue

    dropBox.signOut()
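# Illustrative sketch (not part of the original script): how uploadTier0Files()
# could be driven; the file name and credentials are placeholders.
def _exampleTier0Upload():
    uploadTier0Files(['run123456_beamspot.db'], 'mycmsuser', 'mysecret')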
def re_upload( options ):
    netrcPath = None
    logDbSrv = prodLogDbSrv
    if options.hostname == defaultDevHostname:
        logDbSrv = devLogDbSrv
    if options.authPath is not None:
        netrcPath = os.path.join( options.authPath, '.netrc' )

    try:
        netrcKey = '%s/%s' %(logDbSrv, logDbSchema)
        print('#netrc key=%s' %netrcKey)
        (username, account, password) = netrc.netrc( netrcPath ).authenticators( netrcKey )
    except IOError as e:
        logging.error('Cannot access netrc file.')
        return 1
    except Exception as e:
        logging.error('Netrc file is invalid: %s' % str(e))
        return 1

    conStr = '%s/%s@%s' %(username, password, logDbSrv)
    con = cx_Oracle.connect( conStr )
    cur = con.cursor()
    fh = options.reUpload
    cur.execute('SELECT FILECONTENT, STATE FROM FILES WHERE FILEHASH = :HASH', {'HASH': fh})
    res = cur.fetchall()
    found = False
    fdata = None
    for r in res:
        found = True
        logging.info("Found file %s in state '%s'" %(fh, r[1]))
        fdata = r[0].read().decode('bz2')
    con.close()
    if not found:
        logging.error("No file uploaded found with hash %s" %fh)
        return 1

    # write the content back as a tar file and unpack it
    fname = '%s.tar' %fh
    with open(fname, "wb" ) as f:
        f.write(fdata)
    rname = 'reupload_%s' %fh
    with tarfile.open(fname) as tar:
        tar.extractall()
    os.remove(fname)

    dfile = 'data.db'
    mdfile = 'metadata.txt'
    if os.path.exists(dfile):
        os.utime(dfile, None)
        os.chmod(dfile, 0o755)
        os.rename(dfile, '%s.db' %rname)
    else:
        logging.error('Tar file does not contain the data file')
        return 1

    if os.path.exists(mdfile):
        os.utime(mdfile, None)
        os.chmod(mdfile, 0o755)
        mdata = None
        with open(mdfile) as md:
            mdata = json.load(md)
        datelabel = datetime.now().strftime("%y-%m-%d %H:%M:%S")
        if mdata is None:
            logging.error('Metadata file is empty.')
            return 1
        logging.debug('Preparing new metadata file...')
        mdata['userText'] = 'reupload %s : %s' %(datelabel, mdata['userText'])
        with open('%s.txt' %rname, 'wb') as jf:
            jf.write( json.dumps( mdata, sort_keys=True, indent = 2 ) )
        os.remove(mdfile)
    else:
        logging.error('Tar file does not contain the metadata file')
        return 1

    logging.info('Files %s prepared for the upload.' %rname)
    arguments = [rname]
    return upload(options, arguments)
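# Illustrative sketch (not part of the original script): the ~/.netrc entry
# re_upload() above looks up to read back an already-uploaded file from the
# log database. The login and password are placeholders; the machine name is
# the '<server>/<schema>' key printed by re_upload().
#
#   machine cms_orcoff_prod/CMS_COND_DROPBOX
#     login mydbreader
#     password mydbsecret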
def upload(options, arguments):
    results = uploadAllFiles(options, arguments)

    if 'status' not in results:
        print('Unexpected error.')
        return -1
    ret = results['status']
    print("upload ended with code: %s" %ret)
    return ret
def main():
    '''Entry point.
    '''

    parser = optparse.OptionParser(usage =
        'Usage: %prog [options] <file> [<file> ...]\n'
    )

    parser.add_option('-d', '--debug',
        dest = 'debug',
        action = 'store_true',
        default = False,
        help = 'Switch on printing debug information. Default: %default',
    )

    parser.add_option('-b', '--backend',
        dest = 'backend',
        default = defaultBackend,
        help = 'dropBox\'s backend to upload to. Default: %default',
    )

    parser.add_option('-H', '--hostname',
        dest = 'hostname',
        default = defaultHostname,
        help = 'dropBox\'s hostname. Default: %default',
    )

    parser.add_option('-u', '--urlTemplate',
        dest = 'urlTemplate',
        default = defaultUrlTemplate,
        help = 'dropBox\'s URL template. Default: %default',
    )

    parser.add_option('-f', '--temporaryFile',
        dest = 'temporaryFile',
        default = defaultTemporaryFile,
        help = 'Temporary file that will be used to store the first tar file. Note that it then will be moved to a file with the hash of the file as its name, so there will be two temporary files created in fact. Default: %default',
    )

    parser.add_option('-n', '--netrcHost',
        dest = 'netrcHost',
        default = defaultNetrcHost,
        help = 'The netrc host (machine) from where the username and password will be read. Default: %default',
    )

    parser.add_option('-a', '--authPath',
        dest = 'authPath',
        default = None,
        help = 'The path of the .netrc file for the authentication. Default: $HOME',
    )

    parser.add_option('-r', '--reUpload',
        dest = 'reUpload',
        default = None,
        help = 'The hash of the file to upload again.',
    )

    (options, arguments) = parser.parse_args()

    logLevel = logging.INFO
    if options.debug:
        logLevel = logging.DEBUG
    logging.basicConfig(
        format = '[%(asctime)s] %(levelname)s: %(message)s',
        level = logLevel,
    )

    if len(arguments) < 1:
        if options.reUpload is None:
            parser.print_help()
            return -2
        return re_upload(options)
    if options.reUpload is not None:
        print("ERROR: options -r can't be specified on a new file upload.")
        return -2

    return upload(options, arguments)
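# Illustrative sketch (not part of the original script): typical invocations,
# assuming the script is saved as uploadConditions.py; the file name and the
# hash are placeholders.
#
#   # upload mycond.db together with its mycond.txt metadata (the wizard starts if the .txt is missing)
#   uploadConditions.py mycond
#
#   # re-upload a previously uploaded file identified by its hash, with debug output
#   uploadConditions.py -d -r 1f0459200cf0d1ba5e950bbd49693b52d116ae24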
def testTier0Upload():

    global defaultNetrcHost

    (username, account, password) = netrc.netrc().authenticators(defaultNetrcHost)

    filenames = ['testFiles/localSqlite-top2']

    uploadTier0Files(filenames, username, password, cookieFileName = None)


if __name__ == '__main__':
    sys.exit(main())