2 '''Script that uploads to the new CMS conditions uploader.
3 Adapted to the new infrastructure from v6 of the upload.py script for the DropBox from Miguel Ojeda.
# Module metadata — credits carried over from v6 of the DropBox upload.py
# script this tool was adapted from.
__author__ = 'Andreas Pfeiffer'
__copyright__ = 'Copyright 2015, CERN CMS'
__credits__ = ['Giacomo Govi', 'Salvatore Di Guida', 'Miguel Ojeda', 'Andreas Pfeiffer']
__license__ = 'Unknown'
__maintainer__ = 'Andreas Pfeiffer'
__email__ = 'andreas.pfeiffer@cern.ch'
# Default configuration for the uploader: which backend/workflow to use and
# where the upload service lives.
defaultBackend = 'online'
defaultHostname = 'cms-conddb-prod.cern.ch'       # production upload service
defaultDevHostname = 'cms-conddb-dev.cern.ch'     # dev service, used for *_prep destinations
defaultUrlTemplate = 'https://%s/cmsDbUpload/'    # %s is one of the hostnames above
defaultTemporaryFile = 'upload.tar.bz2'           # scratch tarball created in the CWD
defaultNetrcHost = 'ConditionUploader'            # machine entry looked up in ~/.netrc
defaultWorkflow = 'offline'
# NOTE(review): fragments of the HTTPError exception class. The `class` line
# and the `__init__` header (original lines ~43-55) are missing from this
# extract; only the docstring and part of the constructor body survive.
44 '''A common HTTP exception.
46 self.code is the response HTTP code as an integer.
47 self.response is the response body (i.e. page).
# Extract a human-readable message from an HTML error page: the text between
# the first <p> and the following </p> becomes the exception's args.
# NOTE(review): raises IndexError if the page has no <p> tag — presumably the
# server always returns one; confirm against the service's error pages.
56 self.
args = (response.split(
'<p>')[1].
split(
'</p>')[0], )
# Directory of CA certificates handed to curl when authenticating against
# CERN SSO (standard RedHat/CentOS CA path).
CERN_SSO_CURL_CAPATH = '/etc/pki/tls/certs'
# NOTE(review): fragments of the HTTP helper class. The `class` line and the
# def headers (__init__, getCookies, discardCookies) are missing from this
# extract; only docstrings and body lines survive.
64 '''Class used for querying URLs using the HTTP protocol.
# HTTP response codes on which query() retries (see the retries docstring
# further below).
67 retryCodes = frozenset([502, 503])
# COOKIEFILE '' enables curl's in-memory cookie engine without reading a file.
74 self.curl.setopt(self.curl.COOKIEFILE,
'')
# SECURITY NOTE(review): peer-certificate verification is disabled
# (SSL_VERIFYPEER=0) while hostname checking stays on (SSL_VERIFYHOST=2).
# Confirm this is intentional for the CERN-internal endpoints.
79 self.curl.setopt(self.curl.SSL_VERIFYPEER, 0)
80 self.curl.setopt(self.curl.SSL_VERIFYHOST, 2)
# getCookies(): return curl's current cookie list.
87 '''Returns the list of cookies.
89 return self.curl.getinfo(self.curl.INFO_COOKIELIST)
# discardCookies(): COOKIELIST 'ALL' wipes every cookie held by the handle.
94 self.curl.setopt(self.curl.COOKIELIST,
'ALL')
# NOTE(review): setter-method fragments of the HTTP class (setBaseUrl,
# setProxy, setTimeout, setRetries); the def headers are missing from this
# extract.
98 '''Allows to set a base URL which will be prefixed to all the URLs
99 that will be queried later.
105 '''Allows to set a proxy.
107 self.curl.setopt(self.curl.PROXY, proxy)
111 '''Allows to set a timeout.
# Whole-transfer timeout in seconds (curl TIMEOUT option).
113 self.curl.setopt(self.curl.TIMEOUT, timeout)
117 '''Allows to set retries.
119 The retries are a sequence of the seconds to wait per retry.
121 The retries are done on:
122 * PyCurl errors (includes network problems, e.g. not being able
123 to connect to the host).
124 * 502 Bad Gateway (for the moment, to avoid temporary
125 Apache-CherryPy issues).
# Docstring is truncated here in the extract (original line 127+ missing).
126 * 503 Service Temporarily Unavailable (for when we update
# getToken(username, password): POST the credentials to the token URL and
# cache the JSON 'token' field on self for later queries.
# NOTE(review): the def header and the curl perform() call (original lines
# ~128-156) are missing from this extract.
135 self.curl.setopt(pycurl.URL, url)
136 self.curl.setopt(pycurl.VERBOSE, 0)
146 self.curl.setopt(pycurl.HTTPHEADER, [
'Accept: application/json'])
# HTTPGET=0 switches the handle to a POST request.
148 self.curl.setopt(self.curl.HTTPGET, 0)
# Python 2 cStringIO buffer collects the response body.
150 response = cStringIO.StringIO()
151 self.curl.setopt(pycurl.WRITEFUNCTION, response.write)
# HTTP basic auth with the user's real credentials (only for this call;
# query() later authenticates with the token instead).
152 self.curl.setopt(pycurl.USERPWD,
'%s:%s' % (username, password) )
154 logging.debug(
'going to connect to server at: %s' % url )
157 code = self.curl.getinfo(pycurl.RESPONSE_CODE)
158 logging.debug(
'got: %s ', str(code))
# Parse the JSON body and keep the token on the instance.
161 self.
token = json.loads( response.getvalue() )[
'token']
162 except Exception
as e:
163 logging.error(
'http::getToken> got error from server: %s ', str(e) )
# 'No JSON object could be decoded' is the Py2 json error for a non-JSON
# body (e.g. an HTML error page); handling for that case (original line
# 165) is missing from this extract.
164 if 'No JSON object could be decoded' in str(e):
166 logging.error(
"error getting token: %s", str(e))
169 logging.debug(
'token: %s', self.
token)
170 logging.debug(
'returning: %s', response.getvalue())
# Returns the raw response body, not the parsed token.
172 return response.getvalue()
# NOTE(review): query() is fragmented; original lines 186-193, 198-201,
# 205-206, 209-210, 215-219, 226-234, 237-241 and 246+ are missing from this
# extract, so the retry-loop structure and finalData initialization are not
# visible. Code kept byte-identical; comments only.
174 def query(self, url, data = None, files = None, keepCookies = True):
175 '''Queries a URL, optionally with some data (dictionary).
177 If no data is specified, a GET request will be used.
178 If some data is specified, a POST request will be used.
180 If files is specified, it must be a dictionary like data but
181 the values are filenames.
183 By default, cookies are kept in-between requests.
185 A HTTPError exception is raised if the response's HTTP code is not 200.
# Copy the data only for logging, so the real payload is untouched.
194 data4log = copy.copy(data)
# Mask the password in the logged copy.
196 if 'password' in data4log.keys():
197 data4log[
'password'] =
'*'
202 logging.debug(
'Querying %s with data %s and files %s (retries left: %s, current sleep: %s)...', url, data4log, files, len(retries), retries[0])
# Consume one entry of the retries sequence as the back-off sleep.
204 time.sleep(retries.pop(0))
207 self.curl.setopt(self.curl.URL, url)
208 self.curl.setopt(self.curl.HTTPGET, 1)
# Authenticate with the sign-in token as the username and an empty
# password (token obtained by getToken()).
211 self.curl.setopt(pycurl.USERPWD,
'%s:""' % ( str(self.
token), ) )
212 self.curl.setopt(pycurl.HTTPHEADER, [
'Accept: application/json'])
# Any data/files turn the request into a multipart POST.
214 if data
is not None or files
is not None:
220 finalData.update(data)
# FORM_FILE makes curl stream the named file as a form field.
222 if files
is not None:
223 for (key, fileName)
in files.items():
224 finalData[key] = (self.curl.FORM_FILE, fileName)
225 self.curl.setopt( self.curl.HTTPPOST, finalData.items() )
227 self.curl.setopt(pycurl.VERBOSE, 0)
229 response = cStringIO.StringIO()
230 self.curl.setopt(self.curl.WRITEFUNCTION, response.write)
233 code = self.curl.getinfo(self.curl.RESPONSE_CODE)
# Retry on 502/503 (retryCodes) while retries remain; otherwise any
# non-200 code raises HTTPError below.
235 if code
in self.
retryCodes and len(retries) > 0:
236 logging.debug(
'Retrying since we got the %s error code...', code)
240 raise HTTPError(code, response.getvalue())
242 return response.getvalue()
# Network-level pycurl errors are also retried until retries run out.
244 except pycurl.error
as e:
245 if len(retries) == 0:
247 logging.debug(
'Retrying since we got the %s pycurl exception...', str(e))
# NOTE(review): body of addToTarFile(tarFile, fileobj, arcname); the def
# header (original line ~251) is missing from this extract.
252 tarInfo = tarFile.gettarinfo(fileobj = fileobj, arcname = arcname)
# Zero out owner/timestamps and force root/root ownership — presumably so
# the tarball content (and therefore its hash) is reproducible regardless of
# who builds it; confirm against the hash computed in _uploadFile.
254 tarInfo.uid = tarInfo.gid = tarInfo.mtime = 0
255 tarInfo.uname = tarInfo.gname =
'root'
256 tarFile.addfile(tarInfo, fileobj)
# NOTE(review): head of the ConditionsUploader class. The class line, the
# __init__ body (original lines 263-276), and the def headers / control flow
# of signIn and signOut are missing from this extract.
259 '''Upload conditions to the CMS conditions uploader service.
262 def __init__(self, hostname = defaultHostname, urlTemplate = defaultUrlTemplate):
# signIn(username, password): obtain and cache an auth token from the
# server via the HTTP helper's getToken().
277 '''Signs in the server.
280 logging.info(
'%s: Signing in user %s ...', self.
hostname, username)
282 self.
token = self.http.getToken(username, password)
283 except Exception
as e:
284 logging.error(
"Caught exception when trying to get token for user %s from %s: %s" % (username, self.
hostname, str(e)) )
# Reached when getToken() returned no usable token.
288 logging.error(
"could not get token for user %s from %s" % (username, self.
hostname) )
291 logging.debug(
"got: '%s'", str(self.
token) )
# signOut(): only the log line survives in this extract; the actual
# sign-out action (original lines 301-307) is missing.
300 '''Signs out the server.
303 logging.info(
'%s: Signing out...', self.
hostname)
# NOTE(review): uploadFile is fragmented; original lines 310, 313-314,
# 319-322, 326-327, 329-330, 332-333 and 335-336 are missing, so the exact
# try/finally structure around the host switch is not visible here.
308 def uploadFile(self, filename, backend = defaultBackend, temporaryFile = defaultTemporaryFile):
309 '''Uploads a file to the dropBox.
311 The filename can be without extension, with .db or with .txt extension.
312 It will be stripped and then both .db and .txt files are used.
# Strip a trailing .db or .txt so both companion files can be derived.
315 basepath = filename.rsplit(
'.db', 1)[0].rsplit(
'.txt', 1)[0]
316 metadataFilename =
'%s.txt' % basepath
317 with open(metadataFilename,
'rb')
as metadataFile:
318 metadata = json.load( metadataFile )
# Route by destination DB: only cms_orcon_prod (production) and
# cms_orcoff_prep (prep) are accepted.
321 destDb = metadata[
'destinationDatabase']
323 if destDb.startswith(
'oracle://cms_orcon_prod')
or destDb.startswith(
'oracle://cms_orcoff_prep'):
# Prep destinations go through the dev upload host.
324 if destDb.startswith(
'oracle://cms_orcoff_prep'):
325 self.
setHost( defaultDevHostname )
328 ret = self.
_uploadFile(filename, backend, temporaryFile)
# Restore the production host after the upload — presumably so a
# later call on the same instance starts from the default; confirm
# whether this runs on the prep branch only or unconditionally
# (surrounding lines missing).
331 self.
setHost( defaultHostname )
# Any other destinationDatabase is rejected without uploading.
334 logging.error(
"DestinationDatabase %s is not valid. Skipping the upload." %destDb)
# NOTE(review): _uploadFile is fragmented; many original lines (338, 341,
# 343-344, 346, 348, 353-355, 359-364, 366, 369, 371-374, 378, 380, 383,
# 385-393, 397-399, 402-405, 409, 417, 421) are missing, including the tar
# close, the loop/except structure, and most of the query() payload.
337 def _uploadFile(self, filename, backend = defaultBackend, temporaryFile = defaultTemporaryFile):
# Derive the common basepath/basename from the .db/.txt file pair.
339 basepath = filename.rsplit(
'.db', 1)[0].rsplit(
'.txt', 1)[0]
340 basename = os.path.basename(basepath)
342 logging.debug(
'%s: %s: Creating tar file for upload ...', self.
hostname, basename)
# Build a bzip2-compressed tarball containing the payload .db file and a
# normalized copy of the metadata.
345 tarFile = tarfile.open(temporaryFile,
'w:bz2')
347 with open(
'%s.db' % basepath,
'rb')
as data:
349 except Exception
as e:
350 msg =
'Error when creating tar file. \n'
351 msg +=
'Please check that you have write access to the directory you are running,\n'
352 msg +=
'and that you have enough space on this disk (df -h .)\n'
# Re-serialize the metadata JSON with sorted keys / fixed indent so the
# archived copy has a canonical byte layout.
356 with tempfile.NamedTemporaryFile()
as metadata:
357 with open(
'%s.txt' % basepath,
'rb')
as originalMetadata:
358 json.dump(json.load(originalMetadata), metadata, sort_keys =
True, indent = 4)
# SHA1 of the tarball, read in 4 MiB chunks to bound memory use.
365 logging.debug(
'%s: %s: Calculating hash...', self.
hostname, basename)
367 fileHash = hashlib.sha1()
368 with open(temporaryFile,
'rb')
as f:
370 data = f.read(4 * 1024 * 1024)
373 fileHash.update(data)
375 fileHash = fileHash.hexdigest()
376 fileInfo = os.stat(temporaryFile)
377 fileSize = fileInfo.st_size
379 logging.debug(
'%s: %s: Hash: %s', self.
hostname, basename, fileHash)
381 logging.info(
'%s: %s: Uploading file (%s, size %s) to the %s backend...', self.
hostname, basename, fileHash, fileSize, backend)
# The tarball is renamed to its own hash; that hash is both the local
# filename and the server-side identifier.
382 os.rename(temporaryFile, fileHash)
# POST to the 'uploadFile' endpoint; the payload dict and files argument
# (original lines 385-393) are mostly missing from this extract.
384 ret = self.http.query(
'uploadFile',
387 'fileName': basename,
391 'uploadedFile': fileHash,
# On any upload error, synthesize a response JSON with the same shape as
# a real server answer so the status-parsing code below still works.
394 except Exception
as e:
395 logging.error(
'Error from uploading: %s' % str(e))
396 ret = json.dumps( {
"status": -1,
"upload" : {
'itemStatus' : { basename : {
'status':
'failed',
'info':str(e)}}},
"error" : str(e)} )
# Parse the per-tag status report and sort tags into ok/skipped/failed.
400 statusInfo = json.loads(ret)[
'upload']
401 logging.debug(
'upload returned: %s', statusInfo )
406 for tag, info
in statusInfo[
'itemStatus'].
items():
407 logging.debug(
'checking tag %s, info %s', tag, str(json.dumps(info, indent=4,sort_keys=
True)) )
# Substring matching on the lower-cased status: 'ok', 'skip', 'fail'.
408 if 'ok' in info[
'status'].lower() :
410 logging.info(
'tag %s successfully uploaded', tag)
411 if 'skip' in info[
'status'].lower() :
412 skippedTags.append( tag )
413 logging.warning(
'found tag %s to be skipped. reason: \n ... \t%s ', tag, info[
'info'])
414 if 'fail' in info[
'status'].lower() :
415 failedTags.append( tag )
416 logging.error(
'found tag %s failed to upload. reason: \n ... \t%s ', tag, info[
'info'])
# Summary lines. NOTE(review): "sucessfully" is a typo inside a runtime
# log string; left as-is here since a doc-only change must not alter
# program strings.
418 if len(okTags) > 0: logging.info (
"tags sucessfully uploaded: %s ", str(okTags) )
419 if len(skippedTags) > 0: logging.warning(
"tags SKIPped to upload : %s ", str(skippedTags) )
420 if len(failedTags) > 0: logging.error (
"tags FAILed to upload : %s ", str(failedTags) )
# Point the user at the server-side log for this upload, keyed by the
# file hash computed above.
422 fileLogURL =
'https://%s/logs/dropBox/getFileLog?fileHash=%s'
423 logging.info(
'file log at: %s', fileLogURL % (self.
hostname,fileHash))
How EventSelector::AcceptEvent() decides whether to accept an event for output: A single or multiple positive criteria — the trigger will pass if any such matching triggers are PASS or EXCEPTION. [A criterion that matches no triggers at all is detected and causes a throw.] A single negative criterion — with the appropriate bit checking in the decision, the trigger will pass if any such matching triggers are FAIL or EXCEPTION. A wildcarded negative criterion that matches more than one trigger in the trigger list ("!*", "!HLTx*" if it matches 2 triggers or more) will accept the event if all the matching triggers are FAIL. It will reject the event if any of the triggers are PASS or EXCEPTION (this matches the behavior of "!*" before the partial wildcard feature was incorporated). Triggers which are in the READY state are completely ignored. (READY should never be returned since the trigger paths have been run.)