
Classes | Functions | Variables
conddb2hdf5 Namespace Reference

Classes

class  DBDataProduct
 
class  DBGlobalTag
 
class  DBPayload
 
class  DBPayloadIterator
 
class  DBTag
 
class  IOVSyncValue
 

Functions

def _checkMerge (previousIOV, newIOV, debugCopy, nExistingDataProducts)
 
def _connect (db, init, read_only, args, as_admin=False)
 
def _exists (session, primary_key, value)
 
def _high (n)
 
def _inserted_before (_IOV, timestamp)
 
def _low (n)
 
def _parse_timestamp (timestamp)
 
def canonicalProductName (product)
 
def connect (args, init=False, read_only=True, as_admin=False)
 
def external_process_get_payloads_objtype_data (queue, args, payloads)
 
def get_payloads_objtype_data (session, payloads)
 
def globalTagInfo (session, name)
 
def main ()
 
def mergeIOVs (previousIOV, newIOV)
 
def parseSince (time_type, since)
 
def previousSyncValue (syncValue)
 
def recordToType (record)
 
def sinceToIOV (sinceList, time_type)
 
def tagInfo (session, name, snapshot)
 
def timeTypeName (time_type)
 
def writeTag (tagsGroup, time_type, IOV_payloads, payloadToRefs, originalTagNames, recName)
 
def writeTagImpl (tagsGroup, name, recName, time_type, IOV_payloads, payloadToRefs, originalTagNames)
 

Variables

dictionary __typedefs
 

Function Documentation

◆ _checkMerge()

def conddb2hdf5._checkMerge (   previousIOV,
  newIOV,
  debugCopy,
  nExistingDataProducts 
)
private

Definition at line 382 of file conddb2hdf5.py.


Referenced by mergeIOVs().

382 def _checkMerge(previousIOV, newIOV, debugCopy, nExistingDataProducts):
383  #sanity check
384  #check proper number of entries
385  previousSince = -1
386  for i,e in enumerate(previousIOV):
387  if len(e[1]) != nExistingDataProducts+1:
388  raise RuntimeError("entry %i has wrong number of elements %i instead of %i"%(i,len(e[1]),nExistingDataProducts+1))
389  if previousSince >= e[0]:
390  #print(previousIOV,newIOV)
391  raise RuntimeError("IOV not in order for index %i"%i)
392  previousSince = e[0]
393 
394  previousIndex = 0
395  debugIndex =0
396  while debugIndex < len(debugCopy) and previousIndex < len(previousIOV):
397  previousSince = previousIOV[previousIndex][0]
398  debugSince = debugCopy[debugIndex][0]
399  #print("debugSince: %i, prevSince: %i"%(debugSince,previousSince))
400  #print(debugCopy)
401  #print(previousIOV)
402  if debugSince != previousSince:
403  previousIndex +=1
404  continue
405  if debugCopy[debugIndex][1] != previousIOV[previousIndex][1][:nExistingDataProducts]:
406  raise RuntimeError("payloads were not properly copied for index %i original:%s new:%s"%(debugIndex,",".join(debugCopy[debugIndex][1]),",".join(previousIOV[previousIndex][1][:nExistingDataProducts])))
407  debugIndex +=1
408  previousIndex +=1
409  if debugIndex != len(debugCopy):
410  raise RuntimeError("failed to copy forward index %i"%debugIndex)
411  newIndex = 0
412  previousIndex = 0
413  while newIndex < len(newIOV) and previousIndex < len(previousIOV):
414  previousSince = previousIOV[previousIndex][0]
415  newSince = newIOV[newIndex][0]
416  if newSince != previousSince:
417  previousIndex +=1
418  continue
419  if previousIOV[previousIndex][1][-1] != newIOV[newIndex][1]:
420  raise RuntimeError("failed to append package at index %i"%newIndex)
421  previousIndex +=1
422  newIndex +=1
423  if newIndex != len(newIOV):
424  raise RuntimeError("failed to merge IOV entry %i"%newIndex)
425 
426 

◆ _connect()

def conddb2hdf5._connect (   db,
  init,
  read_only,
  args,
  as_admin = False 
)
private

Definition at line 72 of file conddb2hdf5.py.

Referenced by connect().

72 def _connect(db, init, read_only, args, as_admin=False):
73 
74  logging.debug('Preparing connection to %s ...', db)
75 
76  url = conddb.make_url( db, read_only)
77  pretty_url = url
78  if url.drivername == 'oracle+frontier':
79  ws = url.host.rsplit('%2F')
80  if ws is not None:
81  pretty_url = 'frontier://%s/%s' %(ws[-1],url.database)
82  connTo = '%s [%s]' %(db,pretty_url)
83  logging.info('Connecting to %s', connTo)
84  logging.debug('DB url: %s',url)
85  verbose= 0
86  if args.verbose is not None:
87  verbose = args.verbose - 1
88  connection = conddb.connect(url, args.authPath, verbose, as_admin)
89 
90 
91  if not read_only:
92  if connection.is_read_only:
93  raise Exception('Impossible to edit a read-only database.')
94 
95  if connection.is_official:
96  if args.force:
97  if not args.yes:
98  logging.warning('You are going to edit an official database. If you are not one of the Offline DB experts but have access to the password for other reasons, please stop now.')
99  else:
100  raise Exception('Editing official databases is forbidden. Use the official DropBox to upload conditions. If you need a special intervention on the database, see the contact help: %s' % conddb.contact_help)
101  # for sqlite we trigger the implicit schema creation
102  if url.drivername == 'sqlite':
103  if init:
104  connection.init()
105  if not connection._is_valid:
106  raise Exception('No valid schema found in the database.')
107 
108  return connection
109 
110 

◆ _exists()

def conddb2hdf5._exists (   session,
  primary_key,
  value 
)
private

Definition at line 61 of file conddb2hdf5.py.


Referenced by globalTagInfo(), and tagInfo().

61 def _exists(session, primary_key, value):
62  ret = None
63  try:
64  ret = session.query(primary_key).\
65  filter(primary_key == value).\
66  count() != 0
67  except sqlalchemy.exc.OperationalError:
68  pass
69 
70  return ret
71 
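A minimal usage sketch, assuming a session obtained via connect(args).session() as in main(); the tag name is illustrative:

    Tag = session.get_dbtype(conddb.Tag)
    exists = _exists(session, Tag.name, 'MyTag_v1')  # illustrative tag name
    if exists:
        pass        # the tag is present
    elif exists is None:
        pass        # the query itself failed (e.g. the table does not exist)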

◆ _high()

def conddb2hdf5._high (   n)
private

Definition at line 130 of file conddb2hdf5.py.


Referenced by parseSince().

130 def _high(n):
131  return int(n) >> 32
132 

◆ _inserted_before()

def conddb2hdf5._inserted_before (   _IOV,
  timestamp 
)
private
To be used inside filter().

Definition at line 30 of file conddb2hdf5.py.

References _parse_timestamp().

Referenced by tagInfo().

30 def _inserted_before(_IOV,timestamp):
31  '''To be used inside filter().
32  '''
33 
34  if timestamp is None:
35  # XXX: Returning None does not get optimized (skipped) by SQLAlchemy,
36  # and returning True does not work in Oracle (generates "and 1"
37  # which breaks Oracle but not SQLite). For the moment just use
38  # this dummy condition.
39  return sqlalchemy.literal(True) == sqlalchemy.literal(True)
40 
41  return _IOV.insertion_time <= _parse_timestamp(timestamp)
42 

◆ _low()

def conddb2hdf5._low (   n)
private

Definition at line 133 of file conddb2hdf5.py.


Referenced by parseSince().

133 def _low(n):
134  return int(n) & 0xffffffff
135 
136 #end from conddb
137 
138 #based on conddb._dump_payload
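Together with _high() above, this helper splits the packed 64-bit since values used for Time and Lumi IOVs into their 32-bit halves. A quick worked example (the run and lumisection numbers are arbitrary):

    packed = (316766 << 32) | 25   # run 316766, lumisection 25 in one 64-bit value
    _high(packed)   # -> 316766
    _low(packed)    # -> 25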

◆ _parse_timestamp()

def conddb2hdf5._parse_timestamp (   timestamp)
private

Definition at line 43 of file conddb2hdf5.py.

Referenced by _inserted_before(), and conddblib._inserted_before().

43 def _parse_timestamp(timestamp):
44  try:
45  return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
46  except ValueError:
47  pass
48 
49  try:
50  return datetime.datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')
51  except ValueError:
52  pass
53 
54  try:
55  return datetime.datetime.strptime(timestamp, '%Y-%m-%d')
56  except ValueError:
57  pass
58 
59  raise Exception("Could not parse timestamp '%s'" % timestamp)
60 
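The formats are tried in order, so all of the following parse; anything else raises (the dates are arbitrary):

    _parse_timestamp('2013-01-20')                  # -> datetime(2013, 1, 20, 0, 0)
    _parse_timestamp('2013-01-20 10:11:12')         # -> datetime(2013, 1, 20, 10, 11, 12)
    _parse_timestamp('2013-01-20 10:11:12.123123')  # -> microseconds preserved
    _parse_timestamp('20/01/2013')                  # -> raises Exception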

◆ canonicalProductName()

def conddb2hdf5.canonicalProductName (   product)

Definition at line 541 of file conddb2hdf5.py.

Referenced by conddb2hdf5.DBPayloadIterator.__next__().

541 def canonicalProductName(product):
542  return __typedefs.get(product,product)
543 
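Since the __typedefs keys are bytes (see the variable documentation below), the lookup expects the raw object-type string; names without a typedef come back unchanged. For example (the unmapped name is made up):

    canonicalProductName(b"EcalCondObjectContainer<EcalPedestal>")  # -> 'EcalPedestals'
    canonicalProductName(b"SomeUnmappedType")                       # -> b'SomeUnmappedType'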

◆ connect()

def conddb2hdf5.connect (   args,
  init = False,
  read_only = True,
  as_admin = False 
)

Definition at line 111 of file conddb2hdf5.py.

References _connect().

Referenced by external_process_get_payloads_objtype_data(), and main().

111 def connect(args, init=False, read_only=True, as_admin=False):
112  args.force = args.force if 'force' in dir(args) else False
113 
114  if 'destdb' in args:
115  if args.destdb is None:
116  args.destdb = args.db
117  if args.db == args.destdb:
118  conn1 = _connect(args.destdb, init, read_only, args)
119  return conn1, conn1
120  conn1 = _connect( args.db, init, True, args)
121  conn2url = conddb.make_url(args.destdb, False)
122  if conn2url.drivername == 'sqlite' and not os.path.exists(args.destdb):
123  init = True
124  conn2 = _connect(args.destdb, init, False, args)
125  return conn1, conn2
126 
127  return _connect( args.db, init, read_only, args, as_admin)
128 
129 
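A minimal read-only sketch: the args object only needs the attributes the function actually reads (db and authPath, plus optional verbose, force, and destdb); the values below are illustrative:

    import argparse

    args = argparse.Namespace(db='pro', authPath=None, verbose=None)
    connection = connect(args)      # single read-only connection
    session = connection.session()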

◆ external_process_get_payloads_objtype_data()

def conddb2hdf5.external_process_get_payloads_objtype_data (   queue,
  args,
  payloads 
)

Definition at line 146 of file conddb2hdf5.py.

References connect(), and get_payloads_objtype_data().

146 def external_process_get_payloads_objtype_data(queue, args, payloads):
147  connection = connect(args)
148  session = connection.session()
149  queue.put(get_payloads_objtype_data(session, payloads))
150 #local
151 
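As the name suggests, this is meant to run in a child process and hand its result back through a queue; a hedged sketch of the driving pattern (the actual call site lives elsewhere in conddb2hdf5.py, and payloadHashes is an illustrative iterable of payload hash strings):

    import multiprocessing

    queue = multiprocessing.Queue()
    worker = multiprocessing.Process(
        target=external_process_get_payloads_objtype_data,
        args=(queue, args, payloadHashes))
    worker.start()
    table = queue.get()   # list of (hash, object_type, data) rows
    worker.join()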

◆ get_payloads_objtype_data()

def conddb2hdf5.get_payloads_objtype_data (   session,
  payloads 
)

Definition at line 139 of file conddb2hdf5.py.


Referenced by external_process_get_payloads_objtype_data().

139 def get_payloads_objtype_data(session, payloads):
140 
141  Payload = session.get_dbtype(conddb.Payload)
142  table = session.query(Payload.hash, Payload.object_type, Payload.data).\
143  filter(Payload.hash.in_(payloads)).order_by(Payload.hash).all()
144  return table
145 

◆ globalTagInfo()

def conddb2hdf5.globalTagInfo (   session,
  name 
)

Definition at line 330 of file conddb2hdf5.py.

References _exists().

330 def globalTagInfo(session,name):
331  GlobalTag = session.get_dbtype(conddb.GlobalTag)
332  GlobalTagMap = session.get_dbtype(conddb.GlobalTagMap)
333  try:
334  is_global_tag = _exists(session, GlobalTag.name, name)
335  if is_global_tag:
336  return session.query(GlobalTagMap.record, GlobalTagMap.label, GlobalTagMap.tag_name).\
337  filter(GlobalTagMap.global_tag_name == name).\
338  order_by(GlobalTagMap.record, GlobalTagMap.label).\
339  all()
340  except sqlalchemy.exc.OperationalError:
341  sys.stderr.write("No table for GlobalTags found in DB.\n\n")
342  return None
343 
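Each returned row is a (record, label, tag_name) tuple, ordered by record and label; None comes back when the name is not a global tag or the GlobalTag tables are missing. A short sketch with an illustrative name:

    rows = globalTagInfo(session, 'MyGlobalTag_v1')
    if rows is not None:
        for record, label, tag in rows:
            print(record, label, tag)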

◆ main()

def conddb2hdf5.main ( )

Definition at line 544 of file conddb2hdf5.py.

References connect() and hdf5Writer.writeH5File().

544 def main():
545  parser = argparse.ArgumentParser(description='Read from CMS Condition DB and write to HDF5 file')
546  parser.add_argument('--db', '-d', default='pro', help='Database to run the command on. Run the help subcommand for more information: conddb help')
547  parser.add_argument('name', nargs='+', help="Name of the global tag.")
548  parser.add_argument('--verbose', '-v', action='count', help='Verbosity level. -v prints debugging information of this tool, like tracebacks in case of errors. -vv prints, in addition, all SQL statements issued. -vvv prints, in addition, all results returned by queries.')
549  parser.add_argument('--authPath','-a', default=None, help='Path of the authentication .netrc file. Default: the content of the COND_AUTH_PATH environment variable, when specified.')
550  parser.add_argument('--snapshot', '-T', default=None, help="Snapshot time. If provided, the output will represent the state of the IOVs inserted into database up to the given time. The format of the string must be one of the following: '2013-01-20', '2013-01-20 10:11:12' or '2013-01-20 10:11:12.123123'.")
551  parser.add_argument('--exclude', '-e', nargs='*', help = 'list of records to exclude from the file (cannot be used with --include)')
552  parser.add_argument('--include', '-i', nargs='*', help = 'list of the only records that should be included in the file (cannot be used with --exclude)')
553  parser.add_argument('--output', '-o', default='test.h5cond', help='name of hdf5 output file to write')
554  parser.add_argument('--compressor', '-c', default='zlib', choices =['zlib','lzma','none'], help="compress data using 'zlib', 'lzma' or 'none'")
555  args = parser.parse_args()
556 
557  if args.exclude and args.include:
558  print("Can not use --exclude and --include at the same time")
559  exit(-1)
560 
561  connection = connect(args)
562  session = connection.session()
563 
564  excludeRecords = set()
565  if args.exclude:
566  excludeRecords = set(args.exclude)
567  includeRecords = set()
568  if args.include:
569  includeRecords = set(args.include)
570 
571  writeH5File(args.output, args.name, excludeRecords, includeRecords, lambda x: DBGlobalTag(args, session, x), args.compressor )
572 

◆ mergeIOVs()

def conddb2hdf5.mergeIOVs (   previousIOV,
  newIOV 
)

Definition at line 427 of file conddb2hdf5.py.

References _checkMerge().

Referenced by conddb2hdf5.DBTag.iovsNPayloadNames().

427 def mergeIOVs(previousIOV, newIOV):
428  debugCopy = copy.deepcopy(previousIOV)
429  previousSize = len(previousIOV)
430  newSize = len(newIOV)
431  previousIndex = 0
432  newIndex =0
433  nExistingDataProducts = len(previousIOV[0][1])
434  while newIndex < newSize and previousIndex < previousSize:
435 
436  previousSince = previousIOV[previousIndex][0]
437  newSince = newIOV[newIndex][0]
438  if previousSince == newSince:
439  previousIOV[previousIndex][1].append(newIOV[newIndex][1])
440  newIndex +=1
441  previousIndex +=1
442  continue
443  elif newSince < previousSince:
444  if previousIndex == 0:
445  payloads = [None]*nExistingDataProducts
446  payloads.append(newIOV[newIndex][1])
447  previousIOV.insert(0,[newSince,payloads])
448  else:
449  payloads = previousIOV[previousIndex-1][1][:nExistingDataProducts]
450  payloads.append(newIOV[newIndex][1])
451  previousIOV.insert(previousIndex,[newSince,payloads])
452  newIndex +=1
453  previousIndex +=1
454  previousSize +=1
455  elif newSince > previousSince:
456  if newIndex == 0:
457  previousIOV[previousIndex][1].append(None)
458  else:
459  if len(previousIOV[previousIndex][1]) == nExistingDataProducts:
460  previousIOV[previousIndex][1].append(newIOV[newIndex-1][1])
461  previousIndex +=1
462  if newIndex != newSize:
463  #print("NEED TO EXTEND")
464  #need to append to end
465  previousPayloads = previousIOV[-1][1]
466  while newIndex != newSize:
467  newPayloads = previousPayloads[:]
468  newPayloads[nExistingDataProducts] = newIOV[newIndex][1]
469  previousIOV.append([newIOV[newIndex][0], newPayloads])
470  newIndex +=1
471  if previousIndex != previousSize:
472  #need to add new item to all remaining entries
473  while previousIndex < previousSize:
474  previousIOV[previousIndex][1].append(newIOV[-1][1])
475  previousIndex +=1
476  _checkMerge(previousIOV, newIOV, debugCopy, nExistingDataProducts)
477  return previousIOV
478 
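previousIOV holds entries of the form [since, [payload1, ..., payloadN]] while newIOV holds [since, payload] pairs for the data product being appended; sinces missing on either side are filled by carrying the previous payload forward (or None before the first entry). A small worked example, with letters standing in for payload hashes:

    previousIOV = [[1, ['A']], [5, ['B']]]
    newIOV      = [[1, 'X'], [3, 'Y']]
    mergeIOVs(previousIOV, newIOV)
    # -> [[1, ['A', 'X']], [3, ['A', 'Y']], [5, ['B', 'Y']]]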

◆ parseSince()

def conddb2hdf5.parseSince (   time_type,
  since 
)

Definition at line 306 of file conddb2hdf5.py.

References _high(), and _low().

Referenced by sinceToIOV().

306 def parseSince(time_type, since):
307  if time_type == conddb.TimeType.Time.value:
308  return (_high(since), _low(since))
309  if time_type == conddb.TimeType.Run.value:
310  return (_high(since), 0)
311  if time_type == conddb.TimeType.Lumi.value:
312  return (_high(since), _low(since))
313 

◆ previousSyncValue()

def conddb2hdf5.previousSyncValue (   syncValue)

Definition at line 314 of file conddb2hdf5.py.

Referenced by sinceToIOV().

314 def previousSyncValue(syncValue):
315  if syncValue[1] == 0:
316  return (syncValue[0]-1, 0xffffffff)
317  return (syncValue[0], syncValue[1]-1)
318 

◆ recordToType()

def conddb2hdf5.recordToType (   record)

Definition at line 512 of file conddb2hdf5.py.

Referenced by conddb2hdf5.DBTag.__type().

512 def recordToType(record):
513  import subprocess
514  return subprocess.run(["condRecordToDataProduct",record], capture_output = True, check=True, text=True).stdout
515 

◆ sinceToIOV()

def conddb2hdf5.sinceToIOV (   sinceList,
  time_type 
)

Definition at line 319 of file conddb2hdf5.py.

References parseSince(), and previousSyncValue().

Referenced by conddb2hdf5.DBTag.iovsNPayloadNames().

319 def sinceToIOV(sinceList, time_type):
320  firstValues = []
321  lastValues = []
322  for since in sinceList:
323  syncValue = parseSince(time_type, since)
324  firstValues.append(syncValue)
325  if len(firstValues) != 1:
326  lastValues.append(previousSyncValue(syncValue))
327  lastValues.append((0xFFFFFFFF,0xFFFFFFFF))
328  return [firstValues,lastValues]
329 
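The two returned lists give the first and last sync values of each IOV: every interval ends one sync value before the next since, and the final interval is open-ended. A worked run-lumi example (run and lumisection numbers are arbitrary):

    sinceList = [(100 << 32) | 1, (100 << 32) | 50]   # packed run-lumi sinces
    sinceToIOV(sinceList, conddb.TimeType.Lumi.value)
    # -> [[(100, 1), (100, 50)],                      # first sync value of each IOV
    #     [(100, 49), (0xFFFFFFFF, 0xFFFFFFFF)]]      # last sync value of each IOV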

◆ tagInfo()

def conddb2hdf5.tagInfo (   session,
  name,
  snapshot 
)

Definition at line 344 of file conddb2hdf5.py.

References _exists() and _inserted_before().

Referenced by conddb2hdf5.DBTag.iovsNPayloadNames().

344 def tagInfo(session, name, snapshot):
345  Tag = session.get_dbtype(conddb.Tag)
346  IOV = session.get_dbtype(conddb.IOV)
347  is_tag = _exists(session, Tag.name, name)
348  if is_tag:
349  time_type = session.query(Tag.time_type).\
350  filter(Tag.name == name).\
351  scalar()
352 
353  rawTagInfo = session.query(IOV.since, IOV.insertion_time, IOV.payload_hash).\
354  filter(
355  IOV.tag_name == name,
356  _inserted_before(IOV,snapshot),
357  ).\
358  order_by(IOV.since.desc(), IOV.insertion_time.desc()).\
359  from_self().\
360  order_by(IOV.since, IOV.insertion_time).\
361  all()
362  filteredTagInfo = []
363  lastSince = -1
364  for since,insertion,payload in rawTagInfo:
365  if lastSince == since:
366  continue
367  lastSince = since
368  if time_type == conddb.TimeType.Run.value:
369  #need to make Run and RunLumi directly comparable since some records
370  # use a mix of the two for their IOVs
371  since = int(since) << 32
372  filteredTagInfo.append((since,payload))
373 
374  if time_type == conddb.TimeType.Run.value:
375  time_type = conddb.TimeType.Lumi.value
376 
377  return time_type, filteredTagInfo
378 # [sinceLabel, 'Insertion Time', 'Payload', 'Object Type'],
379 # filters = [_since_filter(time_type), None, None, None],
380 # )
381 
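The second element of the return value maps each since to a single payload hash (duplicate sinces from re-insertions are collapsed), and run-based sinces are promoted to run << 32 so the result is always comparable on the run-lumi scale. A hedged usage sketch with an illustrative tag name:

    time_type, iovs = tagInfo(session, 'MyTag_v1', None)   # None: no snapshot cut
    for since, payload_hash in iovs:
        print(since >> 32, since & 0xffffffff, payload_hash)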

◆ timeTypeName()

def conddb2hdf5.timeTypeName (   time_type)

Definition at line 297 of file conddb2hdf5.py.


Referenced by conddb2hdf5.DBTag.time_type().

297 def timeTypeName(time_type):
298  if time_type == conddb.TimeType.Time.value:
299  return 'time'
300  if time_type == conddb.TimeType.Run.value or time_type == conddb.TimeType.Lumi.value:
301  return 'run_lumi'
302  raise RuntimeError("unknown since time %s:"% str(time_type))
303 
304 
305 

◆ writeTag()

def conddb2hdf5.writeTag (   tagsGroup,
  time_type,
  IOV_payloads,
  payloadToRefs,
  originalTagNames,
  recName 
)

Definition at line 505 of file conddb2hdf5.py.

References writeTagImpl().

505 def writeTag(tagsGroup, time_type, IOV_payloads, payloadToRefs, originalTagNames, recName):
506  name = originalTagNames[0]
507  if len(originalTagNames) != 1:
508  name = name+"@joined"
509  return writeTagImpl(tagsGroup, name, recName, time_type, IOV_payloads, payloadToRefs, originalTagNames)
510 
511 

◆ writeTagImpl()

def conddb2hdf5.writeTagImpl (   tagsGroup,
  name,
  recName,
  time_type,
  IOV_payloads,
  payloadToRefs,
  originalTagNames 
)

Definition at line 479 of file conddb2hdf5.py.

Referenced by writeTag().

479 def writeTagImpl(tagsGroup, name, recName, time_type, IOV_payloads, payloadToRefs, originalTagNames):
480  tagGroup = tagsGroup.create_group(name)
481  tagGroup.attrs["time_type"] = time_type.encode("ascii") #timeTypeName(time_type).encode("ascii")
482  tagGroup.attrs["db_tags"] = [x.encode("ascii") for x in originalTagNames]
483  tagGroup.attrs["record"] = recName.encode("ascii")
484  firstValues = [x[0] for x in IOV_payloads]
485  lastValues = [x[1] for x in IOV_payloads]
486  syncValueType = np.dtype([("high", np.uint32),("low", np.uint32)])
487  first_np = np.empty(shape=(len(IOV_payloads),), dtype=syncValueType)
488  first_np['high'] = [ x.high for x in firstValues]
489  first_np['low'] = [ x.low for x in firstValues]
490  last_np = np.empty(shape=(len(lastValues),), dtype=syncValueType)
491  last_np['high'] = [ x.high for x in lastValues]
492  last_np['low'] = [ x.low for x in lastValues]
493  #tagGroup.create_dataset("first",data=np.array(firstValues), dtype=syncValueType)
494  #tagGroup.create_dataset("last", data=np.array(lastValues),dtype=syncValueType)
495  payloads = [ [ payloadToRefs[y] for y in x[2]] for x in IOV_payloads]
496  compressor = None
497  if len(first_np) > 100:
498  compressor = 'gzip'
499  tagGroup.create_dataset("first",data=first_np, compression = compressor)
500  tagGroup.create_dataset("last",data=last_np, compression = compressor)
501  tagGroup.create_dataset("payload", data=payloads, dtype=h5py.ref_dtype, compression = compressor)
502  return tagGroup.ref
503 
504 
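Each tag therefore becomes an HDF5 group carrying the record, time_type, and db_tags attributes plus first, last, and payload datasets (gzip-compressed once there are more than 100 IOVs). A hedged sketch that walks an output file and reports such groups; the hierarchy above the tag groups is created by hdf5Writer.writeH5File and is not shown in this listing:

    import h5py

    def reportTag(name, node):
        # pick out any group carrying the attributes writeTagImpl sets
        if isinstance(node, h5py.Group) and 'db_tags' in node.attrs:
            print(name, node.attrs['record'], node.attrs['time_type'],
                  len(node['first']), 'IOVs')

    with h5py.File('test.h5cond', 'r') as f:
        f.visititems(reportTag)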

Variable Documentation

◆ __typedefs

dictionary conddb2hdf5.__typedefs
private
Initial value:
1 = {b"ESCondObjectContainer<ESPedestal>":"ESPedestals",
2  b"ESCondObjectContainer<float>":"ESFloatCondObjectContainer",
3  b"ESCondObjectContainer<ESChannelStatusCode>":"ESChannelStatus",
4  b"EcalCondObjectContainer<EcalPedestal>":"EcalPedestals",
5  b"EcalCondObjectContainer<EcalXtalGroupId>":"EcalWeightXtalGroups",
6  b"EcalCondObjectContainer<EcalMGPAGainRatio>":"EcalGainRatios",
7  b"EcalCondObjectContainer<float>":"EcalFloatCondObjectContainer",
8  b"EcalCondObjectContainer<EcalChannelStatusCode>":"EcalChannelStatus",
9  b"EcalCondObjectContainer<EcalMappingElement>":"EcalMappingElectronics",
10  b"EcalCondObjectContainer<EcalTPGPedestal>":"EcalTPGPedestals",
11  b"EcalCondObjectContainer<EcalTPGLinearizationConstant>":"EcalTPGLinearizationConst",
12  b"EcalCondObjectContainer<EcalTPGCrystalStatusCode>":"EcalTPGCrystalStatus",
13  b"EcalCondTowerObjectContainer<EcalChannelStatusCode>":"EcalDCSTowerStatus",
14  b"EcalCondTowerObjectContainer<EcalDAQStatusCode>":"EcalDAQTowerStatus",
15  b"EcalCondObjectContainer<EcalDQMStatusCode>":"EcalDQMChannelStatus",
16  b"EcalCondTowerObjectContainer<EcalDQMStatusCode>":"EcalDQMTowerStatus",
17  b"EcalCondObjectContainer<EcalPulseShape>":"EcalPulseShapes",
18  b"EcalCondObjectContainer<EcalPulseCovariance>":"EcalPulseCovariances",
19  b"EcalCondObjectContainer<EcalPulseSymmCovariance>":"EcalPulseSymmCovariances",
20  b"HcalItemCollById<HFPhase1PMTData>": "HFPhase1PMTParams",
21  b"l1t::CaloParams":"CaloParams",
22  b"StorableDoubleMap<AbsOOTPileupCorrection>":"OOTPileupCorrectionMapColl",
23  b"PhysicsTools::Calibration::Histogram3D<double,double,double,double>":"PhysicsTools::Calibration::HistogramD3D",
24  b"PhysicsTools::Calibration::MVAComputerContainer":"MVAComputerContainer"
25 }

Definition at line 516 of file conddb2hdf5.py.