2 '''Script that uploads to the new CMS conditions uploader.
3 Adapted to the new infrastructure from v6 of the upload.py script for the DropBox from Miguel Ojeda.
# Script/package metadata (reconstructed: the extraction had split each
# assignment across lines and fused in original line numbers, which made
# this span syntactically invalid).
__author__ = 'Andreas Pfeiffer'
__copyright__ = 'Copyright 2015, CERN CMS'
__credits__ = ['Giacomo Govi', 'Salvatore Di Guida', 'Miguel Ojeda', 'Andreas Pfeiffer']
__license__ = 'Unknown'
__maintainer__ = 'Giacomo Govi'
__email__ = 'giacomo.govi@cern.ch'
27 from datetime
import datetime
# Service / connection defaults used throughout the uploader
# (reconstructed: each assignment was split across two lines with the
# original file's line number fused in, making the span unparseable).
defaultBackend = 'online'
defaultHostname = 'cms-conddb-prod.cern.ch'
defaultDevHostname = 'cms-conddb-dev.cern.ch'
defaultUrlTemplate = 'https://%s/cmsDbUpload/'        # %s is filled with the hostname
defaultTemporaryFile = 'upload.tar.bz2'               # staging tarball, later renamed to its hash
defaultNetrcHost = 'ConditionUploader'                # .netrc machine entry for credentials
defaultWorkflow = 'offline'
prodLogDbSrv = 'cms_orcoff_prod'                      # Oracle service for the production log DB
devLogDbSrv = 'cms_orcoff_prep'                       # Oracle service for the dev/prep log DB
logDbSchema = 'CMS_COND_DROPBOX'
# NOTE(review): fragments of four interactive input helpers (getInput,
# getInputWorkflow, getInputChoose, getInputRepeat).  The leading numbers
# (50, 53, ...) are line numbers of the original file; the jumps show that
# most lines -- including every `def` header and the surrounding retry
# loops -- are elided from this extraction, so the code is left untouched.
# --- getInput: raw_input() with a default and automatic strip() ---
50 '''Like raw_input() but with a default and automatic strip().
53 answer = raw_input(prompt)
57 return default.strip()
# --- getInputWorkflow: restricts the answer to the allowed sync targets ---
61 '''Like getInput() but tailored to get target workflows (synchronization options).
65 workflow =
getInput(defaultWorkflow, prompt)
# Membership test against the fixed set of valid workflows; the error below
# presumably fires on the *non*-matching branch (condition elided -- confirm).
67 if workflow
in frozenset([
'offline',
'hlt',
'express',
'prompt',
'pcl']):
70 logging.error(
'Please specify one of the allowed workflows. See above for the explanation on each of them.')
# --- getInputChoose: pick an entry from a list by integer index ---
74 '''Makes the user choose from a list of options.
81 return optionsList[int(index)]
# ValueError handler (non-integer input); the except line itself is elided.
83 logging.error(
'Please specify an index of the list (i.e. integer).')
# IndexError handler (out-of-range index); the except line itself is elided.
85 logging.error(
'The index you provided is not in the given list.')
# --- getInputRepeat: re-prompts until a non-empty value is given ---
89 '''Like raw_input() but repeats if nothing is provided and automatic strip().
93 answer = raw_input(prompt)
97 logging.error(
'You need to provide a value.')
# NOTE(review): fragment of runWizard() (original lines 100-206).  The
# numbering gaps show many lines are elided -- in particular the
# getInputRepeat()/getInputChoose() calls whose prompt strings appear bare
# below, the since/userText assignments, and the destination-tags loop
# header.  The code is therefore left byte-identical and only annotated.
100 def runWizard(basename, dataFilename, metadataFilename):
102 print '''\nWizard for metadata for %s
104 I will ask you some questions to fill the metadata file. For some of the questions there are defaults between square brackets (i.e. []), leave empty (i.e. hit Enter) to use them.''' % basename
# Inspect the SQLite payload file to discover the tables and input tags.
108 dataConnection = sqlite3.connect(dataFilename)
109 dataCursor = dataConnection.cursor()
110 dataCursor.execute(
'select name from sqlite_master where type == "table"')
111 tables = set(
zip(*dataCursor.fetchall())[0])
115 dataCursor.execute(
'select NAME from TAG')
120 inputTags = dataCursor.fetchall()
121 if len(inputTags) == 0:
123 inputTags =
zip(*inputTags)[0]
# No tag found in the payload: ask the user to type one manually
# (the getInputRepeat() call for the bare prompt string below is elided).
128 if len(inputTags) == 0:
129 print '\nI could not find any input tag in your data file, but you can still specify one manually.'
132 '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. BeamSpotObject_ByRun\ninputTag: ')
135 print '\nI found the following input tags in your SQLite data file:'
136 for (index, inputTag)
in enumerate(inputTags):
137 print ' %s) %s' % (index, inputTag)
140 '\nWhich is the input tag (i.e. the tag to be read from the SQLite data file)?\ne.g. 0 (you select the first in the list)\ninputTag [0]: ')
# Destination DB: loop until one of the two exact connection strings is given.
142 destinationDatabase =
''
144 while ( destinationDatabase !=
'oracle://cms_orcon_prod/CMS_CONDITIONS' and destinationDatabase !=
'oracle://cms_orcoff_prep/CMS_CONDITIONS' ):
147 '\nWhich is the destination database where the tags should be exported? \nPossible choices: oracle://cms_orcon_prod/CMS_CONDITIONS (for prod) or oracle://cms_orcoff_prep/CMS_CONDITIONS (for prep) \ndestinationDatabase: '
150 '\nPlease choose one of the two valid destinations: \noracle://cms_orcon_prod/CMS_CONDITIONS (for prod) or oracle://cms_orcoff_prep/CMS_CONDITIONS (for prep) \
151 \ndestinationDatabase: '
153 raise Exception(
'No valid destination chosen. Bailing out...')
159 '\nWhich is the given since? (if not specified, the one from the SQLite data file will be taken -- note that even if specified, still this may not be the final since, depending on the synchronization options you select later: if the synchronization target is not offline, and the since you give is smaller than the next possible one (i.e. you give a run number earlier than the one which will be started/processed next in prompt/hlt/express), the DropBox will move the since ahead to go to the first safe run instead of the value you gave)\ne.g. 1234\nsince []: ')
168 logging.error(
'The since value has to be an integer or empty (null).')
171 '\nWrite any comments/text you may want to describe your request\ne.g. Muon alignment scenario for...\nuserText []: ')
# Collect destination tags until the user enters an empty value.
176 '\nWhich is the next destination tag to be added (leave empty to stop)?\ne.g. BeamSpotObjects_PCL_byRun_v0_offline\ndestinationTag []: ')
177 if not destinationTag:
178 if len(destinationTags) == 0:
179 logging.error(
'There must be at least one destination tag.')
183 if destinationTag
in destinationTags:
185 'You already added this destination tag. Overwriting the previous one with this new one.')
187 destinationTags[destinationTag] = {
# Assemble the metadata dict and dump it as pretty-printed JSON.
191 'destinationDatabase': destinationDatabase,
192 'destinationTags': destinationTags,
193 'inputTag': inputTag,
195 'userText': userText,
198 metadata = json.dumps(metadata, sort_keys=
True, indent=4)
199 print '\nThis is the generated metadata:\n%s' % metadata
# Final confirmation; only an explicit 'y' saves the metadata file.
202 '\nIs it fine (i.e. save in %s and *upload* the conditions if this is the latest file)?\nAnswer [n]: ' % metadataFilename).lower() ==
'y':
204 logging.info(
'Saving generated metadata in %s...', metadataFilename)
205 with open(metadataFilename,
'wb')
as metadataFile:
206 metadataFile.write(metadata)
# NOTE(review): fragment of the HTTPError exception class (original lines
# 209-221; the class/__init__ headers and intermediate lines are elided).
# The visible code extracts the first <p>...</p> section of the HTML error
# page as the exception's args -- this will raise IndexError if the
# response contains no <p> tag; confirm against the full file.
209 '''A common HTTP exception.
211 self.code is the response HTTP code as an integer.
212 self.response is the response body (i.e. page).
221 self.
args = (response.split(
'<p>')[1].
split(
'</p>')[0], )
# CA-certificate directory used by curl for CERN SSO (reconstructed: the
# extraction had split the assignment across two lines, making it invalid).
CERN_SSO_CURL_CAPATH = '/etc/pki/tls/certs'
229 '''Class used for querying URLs using the HTTP protocol.
232 retryCodes = frozenset([502, 503])
239 self.curl.setopt(self.curl.COOKIEFILE,
'')
244 self.curl.setopt(self.curl.SSL_VERIFYPEER, 0)
245 self.curl.setopt(self.curl.SSL_VERIFYHOST, 2)
252 '''Returns the list of cookies.
254 return self.curl.getinfo(self.curl.INFO_COOKIELIST)
259 self.curl.setopt(self.curl.COOKIELIST,
'ALL')
263 '''Allows to set a base URL which will be prefixed to all the URLs
264 that will be queried later.
270 '''Allows to set a proxy.
272 self.curl.setopt(self.curl.PROXY, proxy)
276 '''Allows to set a timeout.
278 self.curl.setopt(self.curl.TIMEOUT, timeout)
282 '''Allows to set retries.
284 The retries are a sequence of the seconds to wait per retry.
286 The retries are done on:
287 * PyCurl errors (includes network problems, e.g. not being able
288 to connect to the host).
289 * 502 Bad Gateway (for the moment, to avoid temporary
290 Apache-CherryPy issues).
291 * 503 Service Temporarily Unavailable (for when we update
300 self.curl.setopt(pycurl.URL, url)
301 self.curl.setopt(pycurl.VERBOSE, 0)
311 self.curl.setopt(pycurl.HTTPHEADER, [
'Accept: application/json'])
313 self.curl.setopt(self.curl.HTTPGET, 0)
315 response = cStringIO.StringIO()
316 self.curl.setopt(pycurl.WRITEFUNCTION, response.write)
317 self.curl.setopt(pycurl.USERPWD,
'%s:%s' % (username, password) )
319 logging.debug(
'going to connect to server at: %s' % url )
322 code = self.curl.getinfo(pycurl.RESPONSE_CODE)
323 logging.debug(
'got: %s ', str(code))
326 self.
token = json.loads( response.getvalue() )[
'token']
327 except Exception
as e:
328 logging.error(
'http::getToken> got error from server: %s ', str(e) )
329 if 'No JSON object could be decoded' in str(e):
331 logging.error(
"error getting token: %s", str(e))
334 logging.debug(
'token: %s', self.
token)
335 logging.debug(
'returning: %s', response.getvalue())
337 return response.getvalue()
339 def query(self, url, data = None, files = None, keepCookies = True):
340 '''Queries a URL, optionally with some data (dictionary).
342 If no data is specified, a GET request will be used.
343 If some data is specified, a POST request will be used.
345 If files is specified, it must be a dictionary like data but
346 the values are filenames.
348 By default, cookies are kept in-between requests.
350 A HTTPError exception is raised if the response's HTTP code is not 200.
359 data4log = copy.copy(data)
361 if 'password' in data4log.keys():
362 data4log[
'password'] =
'*'
367 logging.debug(
'Querying %s with data %s and files %s (retries left: %s, current sleep: %s)...', url, data4log, files, len(retries), retries[0])
369 time.sleep(retries.pop(0))
372 self.curl.setopt(self.curl.URL, url)
373 self.curl.setopt(self.curl.HTTPGET, 1)
376 self.curl.setopt(pycurl.USERPWD,
'%s:""' % ( str(self.
token), ) )
377 self.curl.setopt(pycurl.HTTPHEADER, [
'Accept: application/json'])
379 if data
is not None or files
is not None:
385 finalData.update(data)
387 if files
is not None:
388 for (key, fileName)
in files.items():
389 finalData[key] = (self.curl.FORM_FILE, fileName)
390 self.curl.setopt( self.curl.HTTPPOST, finalData.items() )
392 self.curl.setopt(pycurl.VERBOSE, 0)
394 response = cStringIO.StringIO()
395 self.curl.setopt(self.curl.WRITEFUNCTION, response.write)
398 code = self.curl.getinfo(self.curl.RESPONSE_CODE)
400 if code
in self.
retryCodes and len(retries) > 0:
401 logging.debug(
'Retrying since we got the %s error code...', code)
405 raise HTTPError(code, response.getvalue())
407 return response.getvalue()
409 except pycurl.error
as e:
410 if len(retries) == 0:
412 logging.debug(
'Retrying since we got the %s pycurl exception...', str(e))
# NOTE(review): body fragment of addToTarFile (original lines 417-421; the
# `def` header is elided).  Ownership and mtime are zeroed and the owner
# names forced to 'root' -- presumably so the tarball bytes (and therefore
# the hash computed over them) do not depend on who runs the upload or
# when; confirm against the full file.
417 tarInfo = tarFile.gettarinfo(fileobj = fileobj, arcname = arcname)
419 tarInfo.uid = tarInfo.gid = tarInfo.mtime = 0
420 tarInfo.uname = tarInfo.gname =
'root'
421 tarFile.addfile(tarInfo, fileobj)
424 '''Upload conditions to the CMS conditions uploader service.
427 def __init__(self, hostname = defaultHostname, urlTemplate = defaultUrlTemplate):
441 if socket.getfqdn().strip().endswith(
'.cms'):
442 self.http.setProxy(
'https://cmsproxy.cms:3128/')
444 '''Signs in the server.
447 logging.info(
'%s: Signing in user %s ...', self.
hostname, username)
449 self.
token = self.http.getToken(username, password)
450 except Exception
as e:
451 logging.error(
"Caught exception when trying to get token for user %s from %s: %s" % (username, self.
hostname, str(e)) )
455 logging.error(
"could not get token for user %s from %s" % (username, self.
hostname) )
458 logging.debug(
"got: '%s'", str(self.
token) )
467 '''Signs out the server.
470 logging.info(
'%s: Signing out...', self.
hostname)
476 '''Updates this script, if a new version is found.
479 logging.debug(
'%s: Checking if a newer version of this script is available ...', self.
hostname)
480 version = int(self.http.query(
'getUploadScriptVersion'))
482 if version <= __version__:
483 logging.debug(
'%s: Script is up-to-date.', self.
hostname)
486 logging.info(
'%s: Updating to a newer version (%s) than the current one (%s): downloading ...', self.
hostname, version, __version__)
488 uploadScript = self.http.query(
'getUploadScript')
492 logging.info(
'%s: ... saving the new version ...', self.
hostname)
493 with open(sys.argv[0],
'wb')
as f:
494 f.write(uploadScript)
496 logging.info(
'%s: ... executing the new version...', self.
hostname)
497 os.execl(sys.executable, *([sys.executable] + sys.argv))
500 def uploadFile(self, filename, backend = defaultBackend, temporaryFile = defaultTemporaryFile):
501 '''Uploads a file to the dropBox.
503 The filename can be without extension, with .db or with .txt extension.
504 It will be stripped and then both .db and .txt files are used.
507 basepath = filename.rsplit(
'.db', 1)[0].rsplit(
'.txt', 1)[0]
508 basename = os.path.basename(basepath)
510 logging.debug(
'%s: %s: Creating tar file for upload ...', self.
hostname, basename)
513 tarFile = tarfile.open(temporaryFile,
'w:bz2')
515 with open(
'%s.db' % basepath,
'rb')
as data:
517 except Exception
as e:
518 msg =
'Error when creating tar file. \n'
519 msg +=
'Please check that you have write access to the directory you are running,\n'
520 msg +=
'and that you have enough space on this disk (df -h .)\n'
524 with tempfile.NamedTemporaryFile()
as metadata:
525 with open(
'%s.txt' % basepath,
'rb')
as originalMetadata:
526 json.dump(json.load(originalMetadata), metadata, sort_keys =
True, indent = 4)
533 logging.debug(
'%s: %s: Calculating hash...', self.
hostname, basename)
535 fileHash = hashlib.sha1()
536 with open(temporaryFile,
'rb')
as f:
538 data = f.read(4 * 1024 * 1024)
541 fileHash.update(data)
543 fileHash = fileHash.hexdigest()
544 fileInfo = os.stat(temporaryFile)
545 fileSize = fileInfo.st_size
547 logging.debug(
'%s: %s: Hash: %s', self.
hostname, basename, fileHash)
549 logging.info(
'%s: %s: Uploading file (%s, size %s) to the %s backend...', self.
hostname, basename, fileHash, fileSize, backend)
550 os.rename(temporaryFile, fileHash)
552 ret = self.http.query(
'uploadFile',
555 'fileName': basename,
559 'uploadedFile': fileHash,
562 except Exception
as e:
563 logging.error(
'Error from uploading: %s' % str(e))
564 ret = json.dumps( {
"status": -1,
"upload" : {
'itemStatus' : { basename : {
'status':
'failed',
'info':str(e)}}},
"error" : str(e)} )
568 statusInfo = json.loads(ret)[
'upload']
569 logging.debug(
'upload returned: %s', statusInfo )
574 for tag, info
in statusInfo[
'itemStatus'].
items():
575 logging.debug(
'checking tag %s, info %s', tag, str(json.dumps(info, indent=4,sort_keys=
True)) )
576 if 'ok' in info[
'status'].lower() :
578 logging.info(
'tag %s successfully uploaded', tag)
579 if 'skip' in info[
'status'].lower() :
580 skippedTags.append( tag )
581 logging.warning(
'found tag %s to be skipped. reason: \n ... \t%s ', tag, info[
'info'])
582 if 'fail' in info[
'status'].lower() :
583 failedTags.append( tag )
584 logging.error(
'found tag %s failed to upload. reason: \n ... \t%s ', tag, info[
'info'])
586 if len(okTags) > 0: logging.info (
"tags sucessfully uploaded: %s ", str(okTags) )
587 if len(skippedTags) > 0: logging.warning(
"tags SKIPped to upload : %s ", str(skippedTags) )
588 if len(failedTags) > 0: logging.error (
"tags FAILed to upload : %s ", str(failedTags) )
590 fileLogURL =
'https://%s/logs/dropBox/getFileLog?fileHash=%s'
591 logging.info(
'file log at: %s', fileLogURL % (self.
hostname,fileHash))
598 if options.authPath
is not None:
599 netrcPath = os.path.join( options.authPath,
'.netrc' )
602 (username, account, password) = netrc.netrc( netrcPath ).authenticators(options.netrcHost)
606 'netrc entry "%s" not found: if you wish not to have to retype your password, you can add an entry in your .netrc file. However, beware of the risks of having your password stored as plaintext. Instead.',
610 defaultUsername = getpass.getuser()
611 if defaultUsername
is None:
612 defaultUsername =
'(not found)'
614 username =
getInput(defaultUsername,
'\nUsername [%s]: ' % defaultUsername)
615 password = getpass.getpass(
'Password: ')
618 return dropBox.signIn(username, password)
628 for filename
in arguments:
629 basepath = filename.rsplit(
'.db', 1)[0].rsplit(
'.txt', 1)[0]
630 basename = os.path.basename(basepath)
631 dataFilename =
'%s.db' % basepath
632 metadataFilename =
'%s.txt' % basepath
634 logging.info(
'Checking %s...', basename)
638 with open(dataFilename,
'rb')
as dataFile:
641 errMsg =
'Impossible to open SQLite data file %s' %dataFilename
642 logging.error( errMsg )
644 ret[
'error'] = errMsg
650 dbcon = sqlite3.connect( dataFilename )
651 dbcur = dbcon.cursor()
652 dbcur.execute(
'SELECT * FROM IOV')
653 rows = dbcur.fetchall()
658 errMsg =
'The input SQLite data file %s contains no data.' %dataFilename
659 logging.error( errMsg )
661 ret[
'error'] = errMsg
663 except Exception
as e:
664 errMsg =
'Check on input SQLite data file %s failed: %s' %(dataFilename,str(e))
665 logging.error( errMsg )
667 ret[
'error'] = errMsg
672 with open(metadataFilename,
'rb')
as metadataFile:
675 if e.errno != errno.ENOENT:
676 errMsg =
'Impossible to open file %s (for other reason than not existing)' %metadataFilename
677 logging.error( errMsg )
679 ret[
'error'] = errMsg
682 if getInput(
'y',
'\nIt looks like the metadata file %s does not exist. Do you want me to create it and help you fill it?\nAnswer [y]: ' % metadataFilename).lower() !=
'y':
683 errMsg =
'Metadata file %s does not exist' %metadataFilename
684 logging.error( errMsg )
686 ret[
'error'] = errMsg
689 runWizard(basename, dataFilename, metadataFilename)
697 logging.error(
"Error authenticating user. Aborting.")
698 return {
'status' : -2,
'error' :
"Error authenticating user. Aborting." }
701 dropBox._checkForUpdates()
704 for filename
in arguments:
705 backend = options.backend
706 basepath = filename.rsplit(
'.db', 1)[0].rsplit(
'.txt', 1)[0]
707 metadataFilename =
'%s.txt' % basepath
708 with open(metadataFilename,
'rb')
as metadataFile:
709 metadata = json.load( metadataFile )
712 destDb = metadata[
'destinationDatabase']
713 if destDb.startswith(
'oracle://cms_orcon_prod')
or destDb.startswith(
'oracle://cms_orcoff_prep'):
714 if destDb.startswith(
'oracle://cms_orcoff_prep'):
715 dropBox.setHost( defaultDevHostname )
716 dropBox.signInAgain()
718 results[filename] = dropBox.uploadFile(filename, options.backend, options.temporaryFile)
721 dropBox.setHost( options.hostname )
722 dropBox.signInAgain()
724 results[filename] =
False
725 logging.error(
"DestinationDatabase %s is not valid. Skipping the upload." %destDb)
726 if not results[filename]:
730 ret[
'files'] = results
731 logging.debug(
"all files processed, logging out now.")
735 except HTTPError
as e:
736 logging.error(
'got HTTP error: %s', str(e))
737 return {
'status' : -1,
'error' : str(e) }
742 '''Uploads a bunch of files coming from Tier0.
743 This has the following requirements:
744 * Username/Password based authentication.
745 * Uses the online backend.
746 * Ignores errors related to the upload/content (e.g. duplicated file).
751 dropBox.signIn(username, password)
753 for filename
in filenames:
755 result = dropBox.uploadFile(filename, backend =
'test')
756 except HTTPError
as e:
761 logging.error(
'HTTP Exception 400 Bad Request: Upload-related, skipping. Message: %s', e)
773 logging.error(
'Error from dropbox, upload-related, skipping.')
780 logDbSrv = prodLogDbSrv
781 if options.hostname == defaultDevHostname:
782 logDbSrv = devLogDbSrv
783 if options.authPath
is not None:
784 netrcPath = os.path.join( options.authPath,
'.netrc' )
786 netrcKey =
'%s/%s' %(logDbSrv,logDbSchema)
787 print '#netrc key=%s' %netrcKey
789 (username, account, password) = netrc.netrc( netrcPath ).authenticators( netrcKey )
791 logging.error(
'Cannot access netrc file.')
793 except Exception
as e:
794 logging.error(
'Netrc file is invalid: %s' %str(e))
796 conStr =
'%s/%s@%s' %(username,password,logDbSrv)
797 con = cx_Oracle.connect( conStr )
799 fh = options.reUpload
800 cur.execute(
'SELECT FILECONTENT, STATE FROM FILES WHERE FILEHASH = :HASH',{
'HASH':fh})
806 logging.info(
"Found file %s in state '%s;" %(fh,r[1]))
810 logging.error(
"No file uploaded found with hash %s" %fh)
814 with open(fname,
"wb" )
as f:
816 rname =
'reupload_%s' %fh
817 with tarfile.open(fname)
as tar:
821 mdfile =
'metadata.txt'
822 if os.path.exists(dfile):
824 os.chmod(dfile,0o755)
825 os.rename(dfile,
'%s.db' %rname)
827 logging.error(
'Tar file does not contain the data file')
829 if os.path.exists(mdfile):
830 os.utime(mdfile,
None)
831 os.chmod(mdfile,0o755)
833 with open(mdfile)
as md:
834 mdata = json.load(md)
835 datelabel = datetime.now().strftime(
"%y-%m-%d %H:%M:%S")
837 logging.error(
'Metadata file is empty.')
839 logging.debug(
'Preparing new metadata file...')
840 mdata[
'userText'] =
'reupload %s : %s' %(datelabel,mdata[
'userText'])
841 with open(
'%s.txt' %rname,
'wb')
as jf:
842 jf.write( json.dumps( mdata, sort_keys=
True, indent = 2 ) )
846 logging.error(
'Tar file does not contain the metadata file')
848 logging.info(
'Files %s prepared for the upload.' %rname)
850 return upload(options, arguments)
855 if not results.has_key(
'status'):
856 print 'Unexpected error.'
858 ret = results[
'status']
860 print "upload ended with code: %s" %ret
867 parser = optparse.OptionParser(usage =
868 'Usage: %prog [options] <file> [<file> ...]\n'
871 parser.add_option(
'-d',
'--debug',
875 help =
'Switch on printing debug information. Default: %default',
878 parser.add_option(
'-b',
'--backend',
880 default = defaultBackend,
881 help =
'dropBox\'s backend to upload to. Default: %default',
884 parser.add_option(
'-H',
'--hostname',
886 default = defaultHostname,
887 help =
'dropBox\'s hostname. Default: %default',
890 parser.add_option(
'-u',
'--urlTemplate',
891 dest =
'urlTemplate',
892 default = defaultUrlTemplate,
893 help =
'dropBox\'s URL template. Default: %default',
896 parser.add_option(
'-f',
'--temporaryFile',
897 dest =
'temporaryFile',
898 default = defaultTemporaryFile,
899 help =
'Temporary file that will be used to store the first tar file. Note that it then will be moved to a file with the hash of the file as its name, so there will be two temporary files created in fact. Default: %default',
902 parser.add_option(
'-n',
'--netrcHost',
904 default = defaultNetrcHost,
905 help =
'The netrc host (machine) from where the username and password will be read. Default: %default',
908 parser.add_option(
'-a',
'--authPath',
911 help =
'The path of the .netrc file for the authentication. Default: $HOME',
914 parser.add_option(
'-r',
'--reUpload',
917 help =
'The hash of the file to upload again.',
920 (options, arguments) = parser.parse_args()
922 logLevel = logging.INFO
924 logLevel = logging.DEBUG
926 format =
'[%(asctime)s] %(levelname)s: %(message)s',
930 if len(arguments) < 1:
931 if options.reUpload
is None:
936 if options.reUpload
is not None:
937 print "ERROR: options -r can't be specified on a new file upload."
940 return upload(options, arguments)
944 global defaultNetrcHost
946 (username, account, password) = netrc.netrc().authenticators(defaultNetrcHost)
948 filenames = [
'testFiles/localSqlite-top2']
953 if __name__ ==
'__main__':
OutputIterator zip(InputIterator1 first1, InputIterator1 last1, InputIterator2 first2, InputIterator2 last2, OutputIterator result, Compare comp)
bool decode(bool &, std::string const &)
How EventSelector::AcceptEvent() decides whether to accept an event for output; otherwise it is excluding the probing of: A single or multiple positive criteria — the trigger will pass if any such matching triggers are PASS or EXCEPTION. [A criterion that matches no triggers at all is detected and causes a throw.] A single negative criterion, with an expectation of appropriate bit checking in the decision — the trigger will pass if any such matching triggers are FAIL or EXCEPTION. A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", "!HLTx*" if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL. It will reject the event if any of the triggers are PASS or EXCEPTION (this matches the behavior of "!*" before the partial wildcard feature was incorporated). Triggers which are in the READY state are completely ignored. (READY should never be returned since the trigger paths have been run